diff --git a/.coveragerc b/.coveragerc
index d097511c..34417c3f 100644
--- a/.coveragerc
+++ b/.coveragerc
@@ -11,3 +11,5 @@ exclude_lines =
def __repr__
# Ignore abstract methods
raise NotImplementedError
+ # Ignore coverage for code specific to static type checkers
+ TYPE_CHECKING
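For context on the new exclude rule: code under a `typing.TYPE_CHECKING` guard is evaluated only by static type checkers and never executes at runtime, so coverage would otherwise report it as missed. A minimal sketch of the pattern (hypothetical module, not part of this diff):

```python
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Evaluated only by static type checkers (mypy, pyright); these lines
    # never run, so the TYPE_CHECKING exclude above keeps them out of the
    # coverage report.
    from google.api_core.operations_v1 import OperationsClient


def describe(client: "OperationsClient") -> str:
    # The quoted annotation keeps the import out of the runtime path.
    return repr(client)
```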
diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml
new file mode 100644
index 00000000..51b21a62
--- /dev/null
+++ b/.github/.OwlBot.lock.yaml
@@ -0,0 +1,17 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+docker:
+ image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest
+ digest: sha256:a7aef70df5f13313ddc027409fc8f3151422ec2a57ac8730fce8fa75c060d5bb
+# created: 2025-04-10T17:00:10.042601326Z
diff --git a/google/__init__.py b/.github/.OwlBot.yaml
similarity index 70%
rename from google/__init__.py
rename to .github/.OwlBot.yaml
index 0d0a4c3a..c8b40cc7 100644
--- a/google/__init__.py
+++ b/.github/.OwlBot.yaml
@@ -1,4 +1,4 @@
-# Copyright 2016 Google LLC
+# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -12,13 +12,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-"""Google namespace package."""
+docker:
+ image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest
-try:
- import pkg_resources
+begin-after-commit-hash: 7af2cb8b2b725641ac0d07e2f256d453682802e6
- pkg_resources.declare_namespace(__name__)
-except ImportError:
- import pkgutil
-
- __path__ = pkgutil.extend_path(__path__, __name__)
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
new file mode 100644
index 00000000..1b023b72
--- /dev/null
+++ b/.github/CODEOWNERS
@@ -0,0 +1,12 @@
+# Code owners file.
+# This file controls who is tagged for review for any given pull request.
+#
+# For syntax help see:
+# https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax
+# Note: This file is autogenerated. To make changes to the codeowner team, please update .repo-metadata.json.
+
+# @googleapis/yoshi-python @googleapis/actools-python are the default owners for changes in this repo
+* @googleapis/yoshi-python @googleapis/actools-python
+
+# @googleapis/python-samples-reviewers @googleapis/actools-python are the default owners for samples changes
+/samples/ @googleapis/python-samples-reviewers @googleapis/actools-python
diff --git a/.github/auto-approve.yml b/.github/auto-approve.yml
new file mode 100644
index 00000000..311ebbb8
--- /dev/null
+++ b/.github/auto-approve.yml
@@ -0,0 +1,3 @@
+# https://github.com/googleapis/repo-automation-bots/tree/main/packages/auto-approve
+processes:
+ - "OwlBotTemplateChanges"
diff --git a/tests/unit/test_general_helpers.py b/.github/auto-label.yaml
similarity index 53%
rename from tests/unit/test_general_helpers.py
rename to .github/auto-label.yaml
index 027d4892..21786a4e 100644
--- a/tests/unit/test_general_helpers.py
+++ b/.github/auto-label.yaml
@@ -1,4 +1,4 @@
-# Copyright 2017, Google LLC
+# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -11,31 +11,10 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-import functools
-
-from google.api_core import general_helpers
-
-
-def test_wraps_normal_func():
-    def func():
-        return 42
-
-    @general_helpers.wraps(func)
-    def replacement():
-        return func()
-
-    assert replacement() == 42
-
-
-def test_wraps_partial():
-    def func():
-        return 42
-
-    partial = functools.partial(func)
-
-    @general_helpers.wraps(partial)
-    def replacement():
-        return func()
-
-    assert replacement() == 42
+requestsize:
+  enabled: true
+
+path:
+  pullrequest: true
+  paths:
+    samples: "samples"
diff --git a/.github/blunderbuss.yml b/.github/blunderbuss.yml
new file mode 100644
index 00000000..1618464d
--- /dev/null
+++ b/.github/blunderbuss.yml
@@ -0,0 +1,17 @@
+# Blunderbuss config
+#
+# This file controls who is assigned for pull requests and issues.
+# Note: This file is autogenerated. To make changes to the assignee
+# team, please update `codeowner_team` in `.repo-metadata.json`.
+assign_issues:
+ - googleapis/actools-python
+
+assign_issues_by:
+ - labels:
+ - "samples"
+ to:
+ - googleapis/python-samples-reviewers
+ - googleapis/actools-python
+
+assign_prs:
+ - googleapis/actools-python
diff --git a/.github/cherry-pick-bot.yml b/.github/cherry-pick-bot.yml
new file mode 100644
index 00000000..1e9cfcd3
--- /dev/null
+++ b/.github/cherry-pick-bot.yml
@@ -0,0 +1,2 @@
+enabled: true
+
diff --git a/.github/header-checker-lint.yml b/.github/header-checker-lint.yml
new file mode 100644
index 00000000..6fe78aa7
--- /dev/null
+++ b/.github/header-checker-lint.yml
@@ -0,0 +1,15 @@
+{"allowedCopyrightHolders": ["Google LLC"],
+ "allowedLicenses": ["Apache-2.0", "MIT", "BSD-3"],
+ "ignoreFiles": ["**/requirements.txt", "**/requirements-test.txt", "**/__init__.py", "samples/**/constraints.txt", "samples/**/constraints-test.txt"],
+ "sourceFileExtensions": [
+ "ts",
+ "js",
+ "java",
+ "sh",
+ "Dockerfile",
+ "yaml",
+ "py",
+ "html",
+ "txt"
+ ]
+}
\ No newline at end of file
diff --git a/.github/release-please.yml b/.github/release-please.yml
index 4507ad05..29601ad4 100644
--- a/.github/release-please.yml
+++ b/.github/release-please.yml
@@ -1 +1,11 @@
releaseType: python
+handleGHRelease: true
+# NOTE: this section is generated by synthtool.languages.python
+# See https://github.com/googleapis/synthtool/blob/master/synthtool/languages/python.py
+branches:
+- branch: v1
+ handleGHRelease: true
+ releaseType: python
+- branch: v0
+ handleGHRelease: true
+ releaseType: python
diff --git a/.github/release-trigger.yml b/.github/release-trigger.yml
new file mode 100644
index 00000000..50e8bd30
--- /dev/null
+++ b/.github/release-trigger.yml
@@ -0,0 +1,2 @@
+enabled: true
+multiScmName: python-api-core
diff --git a/.github/snippet-bot.yml b/.github/snippet-bot.yml
new file mode 100644
index 00000000..e69de29b
diff --git a/.github/sync-repo-settings.yaml b/.github/sync-repo-settings.yaml
new file mode 100644
index 00000000..b724bada
--- /dev/null
+++ b/.github/sync-repo-settings.yaml
@@ -0,0 +1,60 @@
+# https://github.com/googleapis/repo-automation-bots/tree/main/packages/sync-repo-settings
+# Rules for main branch protection
+branchProtectionRules:
+# Identifies the protection rule pattern. Name of the branch to be protected.
+# Defaults to `main`
+- pattern: main
+ requiresCodeOwnerReviews: true
+ requiresStrictStatusChecks: true
+ requiredStatusCheckContexts:
+ - 'cla/google'
+  # No Kokoro: the following are GitHub Actions
+ - 'lint'
+ - 'mypy'
+ - 'unit_grpc_gcp-3.7'
+ - 'unit_grpc_gcp-3.8'
+ - 'unit_grpc_gcp-3.9'
+ - 'unit_grpc_gcp-3.10'
+ - 'unit_grpc_gcp-3.11'
+ - 'unit_grpc_gcp-3.12'
+ - 'unit_grpc_gcp-3.13'
+ - 'unit_grpc_gcp-3.14'
+ - 'unit-3.7'
+ - 'unit-3.8'
+ - 'unit-3.9'
+ - 'unit-3.10'
+ - 'unit-3.11'
+ - 'unit-3.12'
+ - 'unit-3.13'
+ - 'unit-3.14'
+ - 'unit_wo_grpc-3.10'
+ - 'unit_wo_grpc-3.11'
+ - 'unit_wo_grpc-3.12'
+ - 'unit_wo_grpc-3.13'
+ - 'unit_wo_grpc-3.14'
+ - 'unit_w_prerelease_deps-3.7'
+ - 'unit_w_prerelease_deps-3.8'
+ - 'unit_w_prerelease_deps-3.9'
+ - 'unit_w_prerelease_deps-3.10'
+ - 'unit_w_prerelease_deps-3.11'
+ - 'unit_w_prerelease_deps-3.12'
+ - 'unit_w_prerelease_deps-3.13'
+ - 'unit_w_prerelease_deps-3.14'
+ - 'unit_w_async_rest_extra-3.7'
+ - 'unit_w_async_rest_extra-3.8'
+ - 'unit_w_async_rest_extra-3.9'
+ - 'unit_w_async_rest_extra-3.10'
+ - 'unit_w_async_rest_extra-3.11'
+ - 'unit_w_async_rest_extra-3.12'
+ - 'unit_w_async_rest_extra-3.13'
+ - 'unit_w_async_rest_extra-3.14'
+ - 'cover'
+ - 'docs'
+ - 'docfx'
+permissionRules:
+ - team: actools-python
+ permission: admin
+ - team: actools
+ permission: admin
+ - team: yoshi-python
+ permission: push
diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml
new file mode 100644
index 00000000..2833fe98
--- /dev/null
+++ b/.github/workflows/docs.yml
@@ -0,0 +1,38 @@
+on:
+ pull_request:
+ branches:
+ - main
+name: docs
+jobs:
+ docs:
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Setup Python
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.10"
+ - name: Install nox
+ run: |
+ python -m pip install --upgrade setuptools pip wheel
+ python -m pip install nox
+ - name: Run docs
+ run: |
+ nox -s docs
+ docfx:
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Setup Python
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.10"
+ - name: Install nox
+ run: |
+ python -m pip install --upgrade setuptools pip wheel
+ python -m pip install nox
+ - name: Run docfx
+ run: |
+ nox -s docfx
diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
new file mode 100644
index 00000000..1051da0b
--- /dev/null
+++ b/.github/workflows/lint.yml
@@ -0,0 +1,25 @@
+on:
+ pull_request:
+ branches:
+ - main
+name: lint
+jobs:
+ lint:
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Setup Python
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.10"
+ - name: Install nox
+ run: |
+ python -m pip install --upgrade setuptools pip wheel
+ python -m pip install nox
+ - name: Run lint
+ run: |
+ nox -s lint
+ - name: Run lint_setup_py
+ run: |
+ nox -s lint_setup_py
diff --git a/.github/workflows/mypy.yml b/.github/workflows/mypy.yml
new file mode 100644
index 00000000..e6a79291
--- /dev/null
+++ b/.github/workflows/mypy.yml
@@ -0,0 +1,22 @@
+on:
+ pull_request:
+ branches:
+ - main
+name: mypy
+jobs:
+ mypy:
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Setup Python
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.10"
+ - name: Install nox
+ run: |
+ python -m pip install --upgrade setuptools pip wheel
+ python -m pip install nox
+ - name: Run mypy
+ run: |
+ nox -s mypy
diff --git a/.github/workflows/unittest.yml b/.github/workflows/unittest.yml
new file mode 100644
index 00000000..f260a6a5
--- /dev/null
+++ b/.github/workflows/unittest.yml
@@ -0,0 +1,82 @@
+name: "Unit tests"
+
+on:
+ pull_request:
+ branches:
+ - main
+
+jobs:
+ run-unittests:
+ name: unit${{ matrix.option }}-${{ matrix.python }}
+ # TODO(https://github.com/googleapis/gapic-generator-python/issues/2303): use `ubuntu-latest` once this bug is fixed.
+ # Use ubuntu-22.04 until Python 3.7 is removed from the test matrix
+ # https://docs.github.com/en/actions/using-github-hosted-runners/using-github-hosted-runners/about-github-hosted-runners#standard-github-hosted-runners-for-public-repositories
+ runs-on: ubuntu-22.04
+ strategy:
+ matrix:
+ option: ["", "_grpc_gcp", "_wo_grpc", "_w_prerelease_deps", "_w_async_rest_extra"]
+ python:
+ - "3.7"
+ - "3.8"
+ - "3.9"
+ - "3.10"
+ - "3.11"
+ - "3.12"
+ - "3.13"
+ - "3.14"
+ exclude:
+ - option: "_wo_grpc"
+ python: 3.7
+ - option: "_wo_grpc"
+ python: 3.8
+ - option: "_wo_grpc"
+ python: 3.9
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Setup Python
+ uses: actions/setup-python@v5
+ with:
+ python-version: ${{ matrix.python }}
+ allow-prereleases: true
+ - name: Install nox
+ run: |
+ python -m pip install --upgrade setuptools pip wheel
+ python -m pip install nox
+ - name: Run unit tests
+ env:
+        COVERAGE_FILE: .coverage${{ matrix.option }}-${{ matrix.python }}
+ run: |
+ nox -s unit${{ matrix.option }}-${{ matrix.python }}
+ - name: Upload coverage results
+ uses: actions/upload-artifact@v4
+ with:
+ name: coverage-artifact-${{ matrix.option }}-${{ matrix.python }}
+ path: .coverage${{ matrix.option }}-${{ matrix.python }}
+ include-hidden-files: true
+
+ report-coverage:
+ name: cover
+ runs-on: ubuntu-latest
+ needs:
+ - run-unittests
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Setup Python
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.10"
+ - name: Install coverage
+ run: |
+ python -m pip install --upgrade setuptools pip wheel
+ python -m pip install coverage
+ - name: Download coverage results
+ uses: actions/download-artifact@v4
+ with:
+ path: .coverage-results/
+ - name: Report coverage results
+ run: |
+ find .coverage-results -type f -name '*.zip' -exec unzip {} \;
+ coverage combine .coverage-results/**/.coverage*
+ coverage report --show-missing --fail-under=100
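The `cover` job drives the `coverage` CLI; roughly the same combine-and-gate step can be sketched with the `coverage` Python API, assuming the per-matrix `.coverage*` files have already been downloaded into `.coverage-results/` as in the workflow above (illustrative only, not the job's actual mechanism):

```python
import sys

import coverage

# Combine the per-matrix data files and fail unless total line coverage
# is 100%, mirroring `coverage combine` + `coverage report --fail-under=100`.
cov = coverage.Coverage()
cov.combine([".coverage-results/"])  # accepts directories of data files
cov.save()
total = cov.report(show_missing=True)
sys.exit(0 if total >= 100 else 1)
```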
diff --git a/.gitignore b/.gitignore
index 157bfb33..168b201f 100644
--- a/.gitignore
+++ b/.gitignore
@@ -29,7 +29,7 @@ pip-log.txt
.nox
.cache
.pytest_cache
-pytype_output
+.pytype
# Mac
@@ -47,11 +47,15 @@ pytype_output
# Built documentation
docs/_build
bigquery/docs/generated
+docs.metadata
# Virtual environment
env/
+venv/
+
+# Test logs
coverage.xml
-sponge_log.xml
+*sponge_log.xml
# System test environment variables.
system_tests/local_test_setup
diff --git a/.kokoro/build.sh b/.kokoro/build.sh
index 5390f032..d41b45aa 100755
--- a/.kokoro/build.sh
+++ b/.kokoro/build.sh
@@ -1,5 +1,5 @@
#!/bin/bash
-# Copyright 2018 Google LLC
+# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -15,7 +15,13 @@
set -eo pipefail
-cd github/python-api-core
+CURRENT_DIR=$(dirname "${BASH_SOURCE[0]}")
+
+if [[ -z "${PROJECT_ROOT:-}" ]]; then
+ PROJECT_ROOT=$(realpath "${CURRENT_DIR}/..")
+fi
+
+pushd "${PROJECT_ROOT}"
# Disable buffering, so that the logs stream through.
export PYTHONUNBUFFERED=1
@@ -24,16 +30,31 @@ export PYTHONUNBUFFERED=1
env | grep KOKORO
# Setup service account credentials.
-export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json
+if [[ -f "${KOKORO_GFILE_DIR}/service-account.json" ]]
+then
+ export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json
+fi
# Setup project id.
-export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json")
-
-# Remove old nox
-python3.6 -m pip uninstall --yes --quiet nox-automation
-
-# Install nox
-python3.6 -m pip install --upgrade --quiet nox
-python3.6 -m nox --version
-
-python3.6 -m nox
+if [[ -f "${KOKORO_GFILE_DIR}/project-id.json" ]]
+then
+ export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json")
+fi
+
+# If this is a continuous build, send the test log to the FlakyBot.
+# See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot.
+if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]]; then
+ cleanup() {
+ chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot
+ $KOKORO_GFILE_DIR/linux_amd64/flakybot
+ }
+ trap cleanup EXIT HUP
+fi
+
+# If NOX_SESSION is set, it only runs the specified session,
+# otherwise run all the sessions.
+if [[ -n "${NOX_SESSION:-}" ]]; then
+ python3 -m nox -s ${NOX_SESSION:-}
+else
+ python3 -m nox
+fi
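The tail of `build.sh` now dispatches on `NOX_SESSION`: run one named session when the variable is set, otherwise run everything. A hypothetical Python rendering of that dispatch, for illustration (the script itself stays in bash):

```python
import os
import subprocess

# Run only the session named by NOX_SESSION when it is set;
# otherwise run every configured nox session.
session = os.environ.get("NOX_SESSION")
cmd = ["python3", "-m", "nox"]
if session:
    cmd += ["-s", session]
subprocess.run(cmd, check=True)
```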
diff --git a/.kokoro/continuous/prerelease-deps.cfg b/.kokoro/continuous/prerelease-deps.cfg
new file mode 100644
index 00000000..3595fb43
--- /dev/null
+++ b/.kokoro/continuous/prerelease-deps.cfg
@@ -0,0 +1,7 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Only run this nox session.
+env_vars: {
+ key: "NOX_SESSION"
+ value: "prerelease_deps"
+}
diff --git a/.kokoro/docs/common.cfg b/.kokoro/docs/common.cfg
deleted file mode 100644
index 3a5cabd6..00000000
--- a/.kokoro/docs/common.cfg
+++ /dev/null
@@ -1,48 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
-
-# Build logs will be here
-action {
- define_artifacts {
- regex: "**/*sponge_log.xml"
- }
-}
-
-# Download trampoline resources.
-gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
-
-# Use the trampoline script to run in docker.
-build_file: "python-api-core/.kokoro/trampoline.sh"
-
-# Configure the docker image for kokoro-trampoline.
-env_vars: {
- key: "TRAMPOLINE_IMAGE"
- value: "gcr.io/cloud-devrel-kokoro-resources/python-multi"
-}
-env_vars: {
- key: "TRAMPOLINE_BUILD_FILE"
- value: "github/python-api-core/.kokoro/publish-docs.sh"
-}
-
-env_vars: {
- key: "STAGING_BUCKET"
- value: "docs-staging"
-}
-
-# Fetch the token needed for reporting release status to GitHub
-before_action {
- fetch_keystore {
- keystore_resource {
- keystore_config_id: 73713
- keyname: "yoshi-automation-github-key"
- }
- }
-}
-
-before_action {
- fetch_keystore {
- keystore_resource {
- keystore_config_id: 73713
- keyname: "docuploader_service_account"
- }
- }
-}
\ No newline at end of file
diff --git a/.kokoro/docs/docs.cfg b/.kokoro/docs/docs.cfg
deleted file mode 100644
index 8f43917d..00000000
--- a/.kokoro/docs/docs.cfg
+++ /dev/null
@@ -1 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
\ No newline at end of file
diff --git a/.kokoro/populate-secrets.sh b/.kokoro/populate-secrets.sh
new file mode 100755
index 00000000..c435402f
--- /dev/null
+++ b/.kokoro/populate-secrets.sh
@@ -0,0 +1,43 @@
+#!/bin/bash
+# Copyright 2024 Google LLC.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -eo pipefail
+
+function now { date +"%Y-%m-%d %H:%M:%S" | tr -d '\n' ;}
+function msg { println "$*" >&2 ;}
+function println { printf '%s\n' "$(now) $*" ;}
+
+
+# Populates requested secrets set in SECRET_MANAGER_KEYS from service account:
+# kokoro-trampoline@cloud-devrel-kokoro-resources.iam.gserviceaccount.com
+SECRET_LOCATION="${KOKORO_GFILE_DIR}/secret_manager"
+msg "Creating folder on disk for secrets: ${SECRET_LOCATION}"
+mkdir -p ${SECRET_LOCATION}
+for key in $(echo ${SECRET_MANAGER_KEYS} | sed "s/,/ /g")
+do
+ msg "Retrieving secret ${key}"
+ docker run --entrypoint=gcloud \
+ --volume=${KOKORO_GFILE_DIR}:${KOKORO_GFILE_DIR} \
+ gcr.io/google.com/cloudsdktool/cloud-sdk \
+ secrets versions access latest \
+ --project cloud-devrel-kokoro-resources \
+ --secret ${key} > \
+ "${SECRET_LOCATION}/${key}"
+ if [[ $? == 0 ]]; then
+ msg "Secret written to ${SECRET_LOCATION}/${key}"
+ else
+ msg "Error retrieving secret ${key}"
+ fi
+done
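`populate-secrets.sh` treats `SECRET_MANAGER_KEYS` as a comma-separated list and writes each secret under `${KOKORO_GFILE_DIR}/secret_manager/<key>`. A hypothetical Python sketch of the same loop (calling `gcloud` directly rather than through the cloud-sdk container):

```python
import os
import pathlib
import subprocess

# Fetch each requested secret and write it to disk, logging failures,
# as the shell loop above does.
secret_dir = pathlib.Path(os.environ["KOKORO_GFILE_DIR"]) / "secret_manager"
secret_dir.mkdir(parents=True, exist_ok=True)
for key in filter(None, os.environ.get("SECRET_MANAGER_KEYS", "").split(",")):
    result = subprocess.run(
        ["gcloud", "secrets", "versions", "access", "latest",
         "--project", "cloud-devrel-kokoro-resources", "--secret", key],
        capture_output=True,
    )
    if result.returncode == 0:
        (secret_dir / key).write_bytes(result.stdout)
    else:
        print(f"Error retrieving secret {key}")
```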
diff --git a/.kokoro/presubmit/prerelease-deps.cfg b/.kokoro/presubmit/prerelease-deps.cfg
new file mode 100644
index 00000000..3595fb43
--- /dev/null
+++ b/.kokoro/presubmit/prerelease-deps.cfg
@@ -0,0 +1,7 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Only run this nox session.
+env_vars: {
+ key: "NOX_SESSION"
+ value: "prerelease_deps"
+}
diff --git a/.kokoro/publish-docs.sh b/.kokoro/publish-docs.sh
deleted file mode 100755
index db50cebf..00000000
--- a/.kokoro/publish-docs.sh
+++ /dev/null
@@ -1,55 +0,0 @@
-#!/bin/bash
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-set -eo pipefail
-
-# Disable buffering, so that the logs stream through.
-export PYTHONUNBUFFERED=1
-
-cd github/python-api-core
-
-# Remove old nox
-python3.6 -m pip uninstall --yes --quiet nox-automation
-
-# Install nox
-python3.6 -m pip install --upgrade --quiet nox
-python3.6 -m nox --version
-
-# build docs
-nox -s docs
-
-python3 -m pip install gcp-docuploader
-
-# install a json parser
-sudo apt-get update
-sudo apt-get -y install software-properties-common
-sudo add-apt-repository universe
-sudo apt-get update
-sudo apt-get -y install jq
-
-# create metadata
-python3 -m docuploader create-metadata \
- --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \
- --version=$(python3 setup.py --version) \
- --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \
- --distribution-name=$(python3 setup.py --name) \
- --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \
- --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \
- --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json)
-
-cat docs.metadata
-
-# upload docs
-python3 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket docs-staging
diff --git a/.kokoro/release.sh b/.kokoro/release.sh
deleted file mode 100755
index 929c7822..00000000
--- a/.kokoro/release.sh
+++ /dev/null
@@ -1,32 +0,0 @@
-#!/bin/bash
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-set -eo pipefail
-
-# Start the releasetool reporter
-python3 -m pip install gcp-releasetool
-python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source /tmp/publisher-script
-
-# Ensure that we have the latest versions of Twine, Wheel, and Setuptools.
-python3 -m pip install --upgrade twine wheel setuptools
-
-# Disable buffering, so that the logs stream through.
-export PYTHONUNBUFFERED=1
-
-# Move into the package, build the distribution and upload.
-TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google_cloud_pypi_password")
-cd github/python-api-core
-python3 setup.py sdist bdist_wheel
-twine upload --username gcloudpypi --password "${TWINE_PASSWORD}" dist/*
diff --git a/.kokoro/release/common.cfg b/.kokoro/release/common.cfg
deleted file mode 100644
index dbe45a62..00000000
--- a/.kokoro/release/common.cfg
+++ /dev/null
@@ -1,64 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
-
-# Build logs will be here
-action {
- define_artifacts {
- regex: "**/*sponge_log.xml"
- }
-}
-
-# Download trampoline resources.
-gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
-
-# Use the trampoline script to run in docker.
-build_file: "python-api-core/.kokoro/trampoline.sh"
-
-# Configure the docker image for kokoro-trampoline.
-env_vars: {
- key: "TRAMPOLINE_IMAGE"
- value: "gcr.io/cloud-devrel-kokoro-resources/python-multi"
-}
-env_vars: {
- key: "TRAMPOLINE_BUILD_FILE"
- value: "github/python-api-core/.kokoro/release.sh"
-}
-
-# Fetch the token needed for reporting release status to GitHub
-before_action {
- fetch_keystore {
- keystore_resource {
- keystore_config_id: 73713
- keyname: "yoshi-automation-github-key"
- }
- }
-}
-
-# Fetch PyPI password
-before_action {
- fetch_keystore {
- keystore_resource {
- keystore_config_id: 73713
- keyname: "google_cloud_pypi_password"
- }
- }
-}
-
-# Fetch magictoken to use with Magic Github Proxy
-before_action {
- fetch_keystore {
- keystore_resource {
- keystore_config_id: 73713
- keyname: "releasetool-magictoken"
- }
- }
-}
-
-# Fetch api key to use with Magic Github Proxy
-before_action {
- fetch_keystore {
- keystore_resource {
- keystore_config_id: 73713
- keyname: "magic-github-proxy-api-key"
- }
- }
-}
diff --git a/.kokoro/release/release.cfg b/.kokoro/release/release.cfg
deleted file mode 100644
index 8f43917d..00000000
--- a/.kokoro/release/release.cfg
+++ /dev/null
@@ -1 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
\ No newline at end of file
diff --git a/.kokoro/samples/lint/common.cfg b/.kokoro/samples/lint/common.cfg
index f5dddb4b..1a2b87b2 100644
--- a/.kokoro/samples/lint/common.cfg
+++ b/.kokoro/samples/lint/common.cfg
@@ -31,4 +31,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
# Use the trampoline script to run in docker.
-build_file: "python-api-core/.kokoro/trampoline.sh"
\ No newline at end of file
+build_file: "python-api-core/.kokoro/trampoline_v2.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.6/common.cfg b/.kokoro/samples/python3.10/common.cfg
similarity index 78%
rename from .kokoro/samples/python3.6/common.cfg
rename to .kokoro/samples/python3.10/common.cfg
index 0a790707..40fb8d81 100644
--- a/.kokoro/samples/python3.6/common.cfg
+++ b/.kokoro/samples/python3.10/common.cfg
@@ -10,7 +10,13 @@ action {
# Specify which tests to run
env_vars: {
key: "RUN_TESTS_SESSION"
- value: "py-3.6"
+ value: "py-3.10"
+}
+
+# Declare build specific Cloud project.
+env_vars: {
+ key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+ value: "python-docs-samples-tests-310"
}
env_vars: {
@@ -31,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
# Use the trampoline script to run in docker.
-build_file: "python-api-core/.kokoro/trampoline.sh"
\ No newline at end of file
+build_file: "python-api-core/.kokoro/trampoline_v2.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.6/presubmit.cfg b/.kokoro/samples/python3.10/continuous.cfg
similarity index 100%
rename from .kokoro/samples/python3.6/presubmit.cfg
rename to .kokoro/samples/python3.10/continuous.cfg
diff --git a/.kokoro/samples/python3.10/periodic-head.cfg b/.kokoro/samples/python3.10/periodic-head.cfg
new file mode 100644
index 00000000..a18c0cfc
--- /dev/null
+++ b/.kokoro/samples/python3.10/periodic-head.cfg
@@ -0,0 +1,11 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-api-core/.kokoro/test-samples-against-head.sh"
+}
diff --git a/.kokoro/samples/python3.6/periodic.cfg b/.kokoro/samples/python3.10/periodic.cfg
similarity index 98%
rename from .kokoro/samples/python3.6/periodic.cfg
rename to .kokoro/samples/python3.10/periodic.cfg
index 50fec964..71cd1e59 100644
--- a/.kokoro/samples/python3.6/periodic.cfg
+++ b/.kokoro/samples/python3.10/periodic.cfg
@@ -3,4 +3,4 @@
env_vars: {
key: "INSTALL_LIBRARY_FROM_SOURCE"
value: "False"
-}
\ No newline at end of file
+}
diff --git a/.kokoro/samples/python3.10/presubmit.cfg b/.kokoro/samples/python3.10/presubmit.cfg
new file mode 100644
index 00000000..a1c8d975
--- /dev/null
+++ b/.kokoro/samples/python3.10/presubmit.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.11/common.cfg b/.kokoro/samples/python3.11/common.cfg
new file mode 100644
index 00000000..d3597f08
--- /dev/null
+++ b/.kokoro/samples/python3.11/common.cfg
@@ -0,0 +1,40 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Specify which tests to run
+env_vars: {
+ key: "RUN_TESTS_SESSION"
+ value: "py-3.11"
+}
+
+# Declare build specific Cloud project.
+env_vars: {
+ key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+ value: "python-docs-samples-tests-311"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-api-core/.kokoro/test-samples.sh"
+}
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
+}
+
+# Download secrets for samples
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-api-core/.kokoro/trampoline_v2.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.6/continuous.cfg b/.kokoro/samples/python3.11/continuous.cfg
similarity index 97%
rename from .kokoro/samples/python3.6/continuous.cfg
rename to .kokoro/samples/python3.11/continuous.cfg
index 7218af14..a1c8d975 100644
--- a/.kokoro/samples/python3.6/continuous.cfg
+++ b/.kokoro/samples/python3.11/continuous.cfg
@@ -3,5 +3,4 @@
env_vars: {
key: "INSTALL_LIBRARY_FROM_SOURCE"
value: "True"
-}
-
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.11/periodic-head.cfg b/.kokoro/samples/python3.11/periodic-head.cfg
new file mode 100644
index 00000000..a18c0cfc
--- /dev/null
+++ b/.kokoro/samples/python3.11/periodic-head.cfg
@@ -0,0 +1,11 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-api-core/.kokoro/test-samples-against-head.sh"
+}
diff --git a/.kokoro/samples/python3.11/periodic.cfg b/.kokoro/samples/python3.11/periodic.cfg
new file mode 100644
index 00000000..71cd1e59
--- /dev/null
+++ b/.kokoro/samples/python3.11/periodic.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "False"
+}
diff --git a/.kokoro/samples/python3.11/presubmit.cfg b/.kokoro/samples/python3.11/presubmit.cfg
new file mode 100644
index 00000000..a1c8d975
--- /dev/null
+++ b/.kokoro/samples/python3.11/presubmit.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.12/common.cfg b/.kokoro/samples/python3.12/common.cfg
new file mode 100644
index 00000000..8a5840a7
--- /dev/null
+++ b/.kokoro/samples/python3.12/common.cfg
@@ -0,0 +1,40 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Specify which tests to run
+env_vars: {
+ key: "RUN_TESTS_SESSION"
+ value: "py-3.12"
+}
+
+# Declare build specific Cloud project.
+env_vars: {
+ key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+ value: "python-docs-samples-tests-312"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-api-core/.kokoro/test-samples.sh"
+}
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
+}
+
+# Download secrets for samples
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-api-core/.kokoro/trampoline_v2.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.12/continuous.cfg b/.kokoro/samples/python3.12/continuous.cfg
new file mode 100644
index 00000000..a1c8d975
--- /dev/null
+++ b/.kokoro/samples/python3.12/continuous.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.12/periodic-head.cfg b/.kokoro/samples/python3.12/periodic-head.cfg
new file mode 100644
index 00000000..a18c0cfc
--- /dev/null
+++ b/.kokoro/samples/python3.12/periodic-head.cfg
@@ -0,0 +1,11 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-api-core/.kokoro/test-samples-against-head.sh"
+}
diff --git a/.kokoro/samples/python3.12/periodic.cfg b/.kokoro/samples/python3.12/periodic.cfg
new file mode 100644
index 00000000..71cd1e59
--- /dev/null
+++ b/.kokoro/samples/python3.12/periodic.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "False"
+}
diff --git a/.kokoro/samples/python3.12/presubmit.cfg b/.kokoro/samples/python3.12/presubmit.cfg
new file mode 100644
index 00000000..a1c8d975
--- /dev/null
+++ b/.kokoro/samples/python3.12/presubmit.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.13/common.cfg b/.kokoro/samples/python3.13/common.cfg
new file mode 100644
index 00000000..2a4199f4
--- /dev/null
+++ b/.kokoro/samples/python3.13/common.cfg
@@ -0,0 +1,40 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Specify which tests to run
+env_vars: {
+ key: "RUN_TESTS_SESSION"
+ value: "py-3.13"
+}
+
+# Declare build specific Cloud project.
+env_vars: {
+ key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+ value: "python-docs-samples-tests-313"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-api-core/.kokoro/test-samples.sh"
+}
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
+}
+
+# Download secrets for samples
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-api-core/.kokoro/trampoline_v2.sh"
diff --git a/.kokoro/samples/python3.13/continuous.cfg b/.kokoro/samples/python3.13/continuous.cfg
new file mode 100644
index 00000000..a1c8d975
--- /dev/null
+++ b/.kokoro/samples/python3.13/continuous.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.13/periodic-head.cfg b/.kokoro/samples/python3.13/periodic-head.cfg
new file mode 100644
index 00000000..a18c0cfc
--- /dev/null
+++ b/.kokoro/samples/python3.13/periodic-head.cfg
@@ -0,0 +1,11 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-api-core/.kokoro/test-samples-against-head.sh"
+}
diff --git a/.kokoro/samples/python3.13/periodic.cfg b/.kokoro/samples/python3.13/periodic.cfg
new file mode 100644
index 00000000..71cd1e59
--- /dev/null
+++ b/.kokoro/samples/python3.13/periodic.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "False"
+}
diff --git a/.kokoro/samples/python3.13/presubmit.cfg b/.kokoro/samples/python3.13/presubmit.cfg
new file mode 100644
index 00000000..a1c8d975
--- /dev/null
+++ b/.kokoro/samples/python3.13/presubmit.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.7/common.cfg b/.kokoro/samples/python3.7/common.cfg
index 6eaddc31..a3aa10b5 100644
--- a/.kokoro/samples/python3.7/common.cfg
+++ b/.kokoro/samples/python3.7/common.cfg
@@ -13,6 +13,12 @@ env_vars: {
value: "py-3.7"
}
+# Declare build specific Cloud project.
+env_vars: {
+ key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+ value: "python-docs-samples-tests-py37"
+}
+
env_vars: {
key: "TRAMPOLINE_BUILD_FILE"
value: "github/python-api-core/.kokoro/test-samples.sh"
@@ -31,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
# Use the trampoline script to run in docker.
-build_file: "python-api-core/.kokoro/trampoline.sh"
\ No newline at end of file
+build_file: "python-api-core/.kokoro/trampoline_v2.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.7/periodic-head.cfg b/.kokoro/samples/python3.7/periodic-head.cfg
new file mode 100644
index 00000000..a18c0cfc
--- /dev/null
+++ b/.kokoro/samples/python3.7/periodic-head.cfg
@@ -0,0 +1,11 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-api-core/.kokoro/test-samples-against-head.sh"
+}
diff --git a/.kokoro/samples/python3.7/periodic.cfg b/.kokoro/samples/python3.7/periodic.cfg
index 50fec964..71cd1e59 100644
--- a/.kokoro/samples/python3.7/periodic.cfg
+++ b/.kokoro/samples/python3.7/periodic.cfg
@@ -3,4 +3,4 @@
env_vars: {
key: "INSTALL_LIBRARY_FROM_SOURCE"
value: "False"
-}
\ No newline at end of file
+}
diff --git a/.kokoro/samples/python3.8/common.cfg b/.kokoro/samples/python3.8/common.cfg
index 461431a4..20c941aa 100644
--- a/.kokoro/samples/python3.8/common.cfg
+++ b/.kokoro/samples/python3.8/common.cfg
@@ -13,6 +13,12 @@ env_vars: {
value: "py-3.8"
}
+# Declare build specific Cloud project.
+env_vars: {
+ key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+ value: "python-docs-samples-tests-py38"
+}
+
env_vars: {
key: "TRAMPOLINE_BUILD_FILE"
value: "github/python-api-core/.kokoro/test-samples.sh"
@@ -31,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
# Use the trampoline script to run in docker.
-build_file: "python-api-core/.kokoro/trampoline.sh"
\ No newline at end of file
+build_file: "python-api-core/.kokoro/trampoline_v2.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.8/periodic-head.cfg b/.kokoro/samples/python3.8/periodic-head.cfg
new file mode 100644
index 00000000..a18c0cfc
--- /dev/null
+++ b/.kokoro/samples/python3.8/periodic-head.cfg
@@ -0,0 +1,11 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-api-core/.kokoro/test-samples-against-head.sh"
+}
diff --git a/.kokoro/samples/python3.8/periodic.cfg b/.kokoro/samples/python3.8/periodic.cfg
index 50fec964..71cd1e59 100644
--- a/.kokoro/samples/python3.8/periodic.cfg
+++ b/.kokoro/samples/python3.8/periodic.cfg
@@ -3,4 +3,4 @@
env_vars: {
key: "INSTALL_LIBRARY_FROM_SOURCE"
value: "False"
-}
\ No newline at end of file
+}
diff --git a/.kokoro/samples/python3.9/common.cfg b/.kokoro/samples/python3.9/common.cfg
new file mode 100644
index 00000000..234887c6
--- /dev/null
+++ b/.kokoro/samples/python3.9/common.cfg
@@ -0,0 +1,40 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Specify which tests to run
+env_vars: {
+ key: "RUN_TESTS_SESSION"
+ value: "py-3.9"
+}
+
+# Declare build specific Cloud project.
+env_vars: {
+ key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+ value: "python-docs-samples-tests-py39"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-api-core/.kokoro/test-samples.sh"
+}
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
+}
+
+# Download secrets for samples
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-api-core/.kokoro/trampoline_v2.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.9/continuous.cfg b/.kokoro/samples/python3.9/continuous.cfg
new file mode 100644
index 00000000..a1c8d975
--- /dev/null
+++ b/.kokoro/samples/python3.9/continuous.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.9/periodic-head.cfg b/.kokoro/samples/python3.9/periodic-head.cfg
new file mode 100644
index 00000000..a18c0cfc
--- /dev/null
+++ b/.kokoro/samples/python3.9/periodic-head.cfg
@@ -0,0 +1,11 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-api-core/.kokoro/test-samples-against-head.sh"
+}
diff --git a/.kokoro/samples/python3.9/periodic.cfg b/.kokoro/samples/python3.9/periodic.cfg
new file mode 100644
index 00000000..71cd1e59
--- /dev/null
+++ b/.kokoro/samples/python3.9/periodic.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "False"
+}
diff --git a/.kokoro/samples/python3.9/presubmit.cfg b/.kokoro/samples/python3.9/presubmit.cfg
new file mode 100644
index 00000000..a1c8d975
--- /dev/null
+++ b/.kokoro/samples/python3.9/presubmit.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/test-samples-against-head.sh b/.kokoro/test-samples-against-head.sh
new file mode 100755
index 00000000..e9d8bd79
--- /dev/null
+++ b/.kokoro/test-samples-against-head.sh
@@ -0,0 +1,26 @@
+#!/bin/bash
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# A customized test runner for samples.
+#
+# For periodic builds, you can specify this file for testing against head.
+
+# `-e` enables the script to automatically fail when a command fails
+# `-o pipefail` sets the exit code to that of the rightmost command that exits with a non-zero status
+set -eo pipefail
+# Enables `**` to include files nested inside sub-folders
+shopt -s globstar
+
+exec .kokoro/test-samples-impl.sh
diff --git a/.kokoro/test-samples-impl.sh b/.kokoro/test-samples-impl.sh
new file mode 100755
index 00000000..53e365bc
--- /dev/null
+++ b/.kokoro/test-samples-impl.sh
@@ -0,0 +1,103 @@
+#!/bin/bash
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# `-e` enables the script to automatically fail when a command fails
+# `-o pipefail` sets the exit code to that of the rightmost command that exits with a non-zero status
+set -eo pipefail
+# Enables `**` to include files nested inside sub-folders
+shopt -s globstar
+
+# Exit early if samples don't exist
+if ! find samples -name 'requirements.txt' | grep -q .; then
+ echo "No tests run. './samples/**/requirements.txt' not found"
+ exit 0
+fi
+
+# Disable buffering, so that the logs stream through.
+export PYTHONUNBUFFERED=1
+
+# Debug: show build environment
+env | grep KOKORO
+
+# Install nox
+# `virtualenv==20.26.6` is added for Python 3.7 compatibility
+python3.9 -m pip install --upgrade --quiet nox virtualenv==20.26.6
+
+# Use secrets accessor service account to get secrets
+if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then
+ gcloud auth activate-service-account \
+ --key-file="${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" \
+ --project="cloud-devrel-kokoro-resources"
+fi
+
+# This script will create 3 files:
+# - testing/test-env.sh
+# - testing/service-account.json
+# - testing/client-secrets.json
+./scripts/decrypt-secrets.sh
+
+source ./testing/test-env.sh
+export GOOGLE_APPLICATION_CREDENTIALS=$(pwd)/testing/service-account.json
+
+# For cloud-run session, we activate the service account for gcloud sdk.
+gcloud auth activate-service-account \
+ --key-file "${GOOGLE_APPLICATION_CREDENTIALS}"
+
+export GOOGLE_CLIENT_SECRETS=$(pwd)/testing/client-secrets.json
+
+echo -e "\n******************** TESTING PROJECTS ********************"
+
+# Switch to 'fail at end' to allow all tests to complete before exiting.
+set +e
+# Use RTN to return a non-zero value if the test fails.
+RTN=0
+ROOT=$(pwd)
+# Find all requirements.txt in the samples directory (may break on whitespace).
+for file in samples/**/requirements.txt; do
+ cd "$ROOT"
+ # Navigate to the project folder.
+ file=$(dirname "$file")
+ cd "$file"
+
+ echo "------------------------------------------------------------"
+ echo "- testing $file"
+ echo "------------------------------------------------------------"
+
+ # Use nox to execute the tests for the project.
+ python3.9 -m nox -s "$RUN_TESTS_SESSION"
+ EXIT=$?
+
+ # If this is a periodic build, send the test log to the FlakyBot.
+ # See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot.
+ if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
+ chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot
+ $KOKORO_GFILE_DIR/linux_amd64/flakybot
+ fi
+
+ if [[ $EXIT -ne 0 ]]; then
+ RTN=1
+ echo -e "\n Testing failed: Nox returned a non-zero exit code. \n"
+ else
+ echo -e "\n Testing completed.\n"
+ fi
+
+done
+cd "$ROOT"
+
+# Workaround for Kokoro permissions issue: delete secrets
+rm testing/{test-env.sh,client-secrets.json,service-account.json}
+
+exit "$RTN"
diff --git a/.kokoro/test-samples.sh b/.kokoro/test-samples.sh
index 653239fb..7933d820 100755
--- a/.kokoro/test-samples.sh
+++ b/.kokoro/test-samples.sh
@@ -1,5 +1,5 @@
#!/bin/bash
-# Copyright 2020 Google LLC
+# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -13,6 +13,10 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+# The default test runner for samples.
+#
+# For periodic builds, we rewind the repo to the latest release and
+# run test-samples-impl.sh.
# `-e` enables the script to automatically fail when a command fails
# `-o pipefail` sets the exit code to that of the rightmost command that exits with a non-zero status
@@ -20,85 +24,21 @@ set -eo pipefail
# Enables `**` to include files nested inside sub-folders
shopt -s globstar
-cd github/python-api-core
-
# Run periodic samples tests at latest release
if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
+    # Preserve the test runner implementation.
+ cp .kokoro/test-samples-impl.sh "${TMPDIR}/test-samples-impl.sh"
+ echo "--- IMPORTANT IMPORTANT IMPORTANT ---"
+ echo "Now we rewind the repo back to the latest release..."
LATEST_RELEASE=$(git describe --abbrev=0 --tags)
git checkout $LATEST_RELEASE
-fi
-
-# Disable buffering, so that the logs stream through.
-export PYTHONUNBUFFERED=1
-
-# Debug: show build environment
-env | grep KOKORO
-
-# Install nox
-python3.6 -m pip install --upgrade --quiet nox
-
-# Use secrets acessor service account to get secrets
-if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then
- gcloud auth activate-service-account \
- --key-file="${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" \
- --project="cloud-devrel-kokoro-resources"
-fi
-
-# This script will create 3 files:
-# - testing/test-env.sh
-# - testing/service-account.json
-# - testing/client-secrets.json
-./scripts/decrypt-secrets.sh
-
-source ./testing/test-env.sh
-export GOOGLE_APPLICATION_CREDENTIALS=$(pwd)/testing/service-account.json
-
-# For cloud-run session, we activate the service account for gcloud sdk.
-gcloud auth activate-service-account \
- --key-file "${GOOGLE_APPLICATION_CREDENTIALS}"
-
-export GOOGLE_CLIENT_SECRETS=$(pwd)/testing/client-secrets.json
-
-echo -e "\n******************** TESTING PROJECTS ********************"
-
-# Switch to 'fail at end' to allow all tests to complete before exiting.
-set +e
-# Use RTN to return a non-zero value if the test fails.
-RTN=0
-ROOT=$(pwd)
-# Find all requirements.txt in the samples directory (may break on whitespace).
-for file in samples/**/requirements.txt; do
- cd "$ROOT"
- # Navigate to the project folder.
- file=$(dirname "$file")
- cd "$file"
-
- echo "------------------------------------------------------------"
- echo "- testing $file"
- echo "------------------------------------------------------------"
-
- # Use nox to execute the tests for the project.
- python3.6 -m nox -s "$RUN_TESTS_SESSION"
- EXIT=$?
-
- # If this is a periodic build, send the test log to the Build Cop Bot.
- # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/buildcop.
- if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
- chmod +x $KOKORO_GFILE_DIR/linux_amd64/buildcop
- $KOKORO_GFILE_DIR/linux_amd64/buildcop
+ echo "The current head is: "
+ echo $(git rev-parse --verify HEAD)
+ echo "--- IMPORTANT IMPORTANT IMPORTANT ---"
+    # Move the test runner implementation back if the file is missing.
+ if [ ! -f .kokoro/test-samples-impl.sh ]; then
+ cp "${TMPDIR}/test-samples-impl.sh" .kokoro/test-samples-impl.sh
fi
+fi
- if [[ $EXIT -ne 0 ]]; then
- RTN=1
- echo -e "\n Testing failed: Nox returned a non-zero exit code. \n"
- else
- echo -e "\n Testing completed.\n"
- fi
-
-done
-cd "$ROOT"
-
-# Workaround for Kokoro permissions issue: delete secrets
-rm testing/{test-env.sh,client-secrets.json,service-account.json}
-
-exit "$RTN"
\ No newline at end of file
+exec .kokoro/test-samples-impl.sh
diff --git a/.kokoro/trampoline.sh b/.kokoro/trampoline.sh
index e8c4251f..48f79699 100755
--- a/.kokoro/trampoline.sh
+++ b/.kokoro/trampoline.sh
@@ -1,5 +1,5 @@
#!/bin/bash
-# Copyright 2017 Google Inc.
+# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -15,9 +15,14 @@
set -eo pipefail
-python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py" || ret_code=$?
+# Always run the cleanup script, regardless of the success of bouncing into
+# the container.
+function cleanup() {
+ chmod +x ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh
+ ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh
+ echo "cleanup";
+}
+trap cleanup EXIT
-chmod +x ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh
-${KOKORO_GFILE_DIR}/trampoline_cleanup.sh || true
-
-exit ${ret_code}
+$(dirname $0)/populate-secrets.sh # Secret Manager secrets.
+python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py"
\ No newline at end of file
diff --git a/.kokoro/trampoline_v2.sh b/.kokoro/trampoline_v2.sh
new file mode 100755
index 00000000..35fa5292
--- /dev/null
+++ b/.kokoro/trampoline_v2.sh
@@ -0,0 +1,487 @@
+#!/usr/bin/env bash
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# trampoline_v2.sh
+#
+# This script does 3 things.
+#
+# 1. Prepare the Docker image for the test
+# 2. Run the Docker with appropriate flags to run the test
+# 3. Upload the newly built Docker image
+#
+# in a way that is somewhat compatible with trampoline_v1.
+#
+# To run this script, first download a few files from GCS to /dev/shm.
+# (/dev/shm is passed into the container as KOKORO_GFILE_DIR).
+#
+# gsutil cp gs://cloud-devrel-kokoro-resources/python-docs-samples/secrets_viewer_service_account.json /dev/shm
+# gsutil cp gs://cloud-devrel-kokoro-resources/python-docs-samples/automl_secrets.txt /dev/shm
+#
+# Then run the script.
+# .kokoro/trampoline_v2.sh
+#
+# These environment variables are required:
+# TRAMPOLINE_IMAGE: The docker image to use.
+# TRAMPOLINE_DOCKERFILE: The location of the Dockerfile.
+#
+# You can optionally change these environment variables:
+# TRAMPOLINE_IMAGE_UPLOAD:
+#     (true|false): Whether to upload the Docker image after a
+#       successful build.
+# TRAMPOLINE_BUILD_FILE: The script to run in the docker container.
+# TRAMPOLINE_WORKSPACE: The workspace path in the docker container.
+# Defaults to /workspace.
+# There may also be repo-specific envvars defined in .trampolinerc in
+# the project root.
+
+
+set -euo pipefail
+
+TRAMPOLINE_VERSION="2.0.5"
+
+if command -v tput >/dev/null && [[ -n "${TERM:-}" ]]; then
+ readonly IO_COLOR_RED="$(tput setaf 1)"
+ readonly IO_COLOR_GREEN="$(tput setaf 2)"
+ readonly IO_COLOR_YELLOW="$(tput setaf 3)"
+ readonly IO_COLOR_RESET="$(tput sgr0)"
+else
+ readonly IO_COLOR_RED=""
+ readonly IO_COLOR_GREEN=""
+ readonly IO_COLOR_YELLOW=""
+ readonly IO_COLOR_RESET=""
+fi
+
+function function_exists {
+ [ $(LC_ALL=C type -t $1)"" == "function" ]
+}
+
+# Logs a message using the given color. The first argument must be one
+# of the IO_COLOR_* variables defined above, such as
+# "${IO_COLOR_YELLOW}". The remaining arguments will be logged in the
+# given color. The log message will also have an RFC-3339 timestamp
+# prepended (in UTC). You can disable the color output by setting
+# TERM=vt100.
+function log_impl() {
+ local color="$1"
+ shift
+ local timestamp="$(date -u "+%Y-%m-%dT%H:%M:%SZ")"
+ echo "================================================================"
+ echo "${color}${timestamp}:" "$@" "${IO_COLOR_RESET}"
+ echo "================================================================"
+}
+
+# Logs the given message with normal coloring and a timestamp.
+function log() {
+ log_impl "${IO_COLOR_RESET}" "$@"
+}
+
+# Logs the given message in green with a timestamp.
+function log_green() {
+ log_impl "${IO_COLOR_GREEN}" "$@"
+}
+
+# Logs the given message in yellow with a timestamp.
+function log_yellow() {
+ log_impl "${IO_COLOR_YELLOW}" "$@"
+}
+
+# Logs the given message in red with a timestamp.
+function log_red() {
+ log_impl "${IO_COLOR_RED}" "$@"
+}
+
+readonly tmpdir=$(mktemp -d -t ci-XXXXXXXX)
+readonly tmphome="${tmpdir}/h"
+mkdir -p "${tmphome}"
+
+function cleanup() {
+ rm -rf "${tmpdir}"
+}
+trap cleanup EXIT
+
+RUNNING_IN_CI="${RUNNING_IN_CI:-false}"
+
+# The workspace in the container, defaults to /workspace.
+TRAMPOLINE_WORKSPACE="${TRAMPOLINE_WORKSPACE:-/workspace}"
+
+pass_down_envvars=(
+ # TRAMPOLINE_V2 variables.
+ # Tells scripts whether they are running as part of CI or not.
+ "RUNNING_IN_CI"
+ # Indicates which CI system we're in.
+ "TRAMPOLINE_CI"
+ # Indicates the version of the script.
+ "TRAMPOLINE_VERSION"
+)
+
+log_yellow "Building with Trampoline ${TRAMPOLINE_VERSION}"
+
+# Detect which CI systems we're in. If we're in any of the CI systems
+# we support, `RUNNING_IN_CI` will be true and `TRAMPOLINE_CI` will be
+# the name of the CI system. Both envvars will be passed down to the
+# container to tell which CI system we're in.
+if [[ -n "${KOKORO_BUILD_ID:-}" ]]; then
+    # Descriptive env var indicating that we're running on CI.
+ RUNNING_IN_CI="true"
+ TRAMPOLINE_CI="kokoro"
+ if [[ "${TRAMPOLINE_USE_LEGACY_SERVICE_ACCOUNT:-}" == "true" ]]; then
+ if [[ ! -f "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json" ]]; then
+ log_red "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json does not exist. Did you forget to mount cloud-devrel-kokoro-resources/trampoline? Aborting."
+ exit 1
+ fi
+ # This service account will be activated later.
+ TRAMPOLINE_SERVICE_ACCOUNT="${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json"
+ else
+ if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then
+ gcloud auth list
+ fi
+ log_yellow "Configuring Container Registry access"
+ gcloud auth configure-docker --quiet
+ fi
+ pass_down_envvars+=(
+ # KOKORO dynamic variables.
+ "KOKORO_BUILD_NUMBER"
+ "KOKORO_BUILD_ID"
+ "KOKORO_JOB_NAME"
+ "KOKORO_GIT_COMMIT"
+ "KOKORO_GITHUB_COMMIT"
+ "KOKORO_GITHUB_PULL_REQUEST_NUMBER"
+ "KOKORO_GITHUB_PULL_REQUEST_COMMIT"
+ # For FlakyBot
+ "KOKORO_GITHUB_COMMIT_URL"
+ "KOKORO_GITHUB_PULL_REQUEST_URL"
+ )
+elif [[ "${TRAVIS:-}" == "true" ]]; then
+ RUNNING_IN_CI="true"
+ TRAMPOLINE_CI="travis"
+ pass_down_envvars+=(
+ "TRAVIS_BRANCH"
+ "TRAVIS_BUILD_ID"
+ "TRAVIS_BUILD_NUMBER"
+ "TRAVIS_BUILD_WEB_URL"
+ "TRAVIS_COMMIT"
+ "TRAVIS_COMMIT_MESSAGE"
+ "TRAVIS_COMMIT_RANGE"
+ "TRAVIS_JOB_NAME"
+ "TRAVIS_JOB_NUMBER"
+ "TRAVIS_JOB_WEB_URL"
+ "TRAVIS_PULL_REQUEST"
+ "TRAVIS_PULL_REQUEST_BRANCH"
+ "TRAVIS_PULL_REQUEST_SHA"
+ "TRAVIS_PULL_REQUEST_SLUG"
+ "TRAVIS_REPO_SLUG"
+ "TRAVIS_SECURE_ENV_VARS"
+ "TRAVIS_TAG"
+ )
+elif [[ -n "${GITHUB_RUN_ID:-}" ]]; then
+ RUNNING_IN_CI="true"
+ TRAMPOLINE_CI="github-workflow"
+ pass_down_envvars+=(
+ "GITHUB_WORKFLOW"
+ "GITHUB_RUN_ID"
+ "GITHUB_RUN_NUMBER"
+ "GITHUB_ACTION"
+ "GITHUB_ACTIONS"
+ "GITHUB_ACTOR"
+ "GITHUB_REPOSITORY"
+ "GITHUB_EVENT_NAME"
+ "GITHUB_EVENT_PATH"
+ "GITHUB_SHA"
+ "GITHUB_REF"
+ "GITHUB_HEAD_REF"
+ "GITHUB_BASE_REF"
+ )
+elif [[ "${CIRCLECI:-}" == "true" ]]; then
+ RUNNING_IN_CI="true"
+ TRAMPOLINE_CI="circleci"
+ pass_down_envvars+=(
+ "CIRCLE_BRANCH"
+ "CIRCLE_BUILD_NUM"
+ "CIRCLE_BUILD_URL"
+ "CIRCLE_COMPARE_URL"
+ "CIRCLE_JOB"
+ "CIRCLE_NODE_INDEX"
+ "CIRCLE_NODE_TOTAL"
+ "CIRCLE_PREVIOUS_BUILD_NUM"
+ "CIRCLE_PROJECT_REPONAME"
+ "CIRCLE_PROJECT_USERNAME"
+ "CIRCLE_REPOSITORY_URL"
+ "CIRCLE_SHA1"
+ "CIRCLE_STAGE"
+ "CIRCLE_USERNAME"
+ "CIRCLE_WORKFLOW_ID"
+ "CIRCLE_WORKFLOW_JOB_ID"
+ "CIRCLE_WORKFLOW_UPSTREAM_JOB_IDS"
+ "CIRCLE_WORKFLOW_WORKSPACE_ID"
+ )
+fi
+
+# Find the repository root by walking up from the given directory
+# until a .git directory is found.
+function repo_root() {
+ local dir="$1"
+    while [[ ! -d "${dir}/.git" ]]; do
+        # Stop at the filesystem root to avoid an infinite loop.
+        [[ "${dir}" != "/" ]] || { echo "repo_root: no .git directory found" >&2; exit 1; }
+        dir="$(dirname "$dir")"
+    done
+ echo "${dir}"
+}
+
+# Detect the project root. In CI builds, we assume the script is in
+# the git tree and traverse from there; otherwise, we traverse from
+# `pwd` to find the `.git` directory.
+if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then
+ PROGRAM_PATH="$(realpath "$0")"
+ PROGRAM_DIR="$(dirname "${PROGRAM_PATH}")"
+ PROJECT_ROOT="$(repo_root "${PROGRAM_DIR}")"
+else
+    PROJECT_ROOT="$(repo_root "$(pwd)")"
+fi
+
+log_yellow "Changing to the project root: ${PROJECT_ROOT}."
+cd "${PROJECT_ROOT}"
+
+# To support a relative path in `TRAMPOLINE_SERVICE_ACCOUNT`, we
+# resolve this environment variable after changing to `PROJECT_ROOT`.
+if [[ -n "${TRAMPOLINE_SERVICE_ACCOUNT:-}" ]]; then
+
+ mkdir -p "${tmpdir}/gcloud"
+ gcloud_config_dir="${tmpdir}/gcloud"
+
+ log_yellow "Using isolated gcloud config: ${gcloud_config_dir}."
+ export CLOUDSDK_CONFIG="${gcloud_config_dir}"
+
+ log_yellow "Using ${TRAMPOLINE_SERVICE_ACCOUNT} for authentication."
+ gcloud auth activate-service-account \
+ --key-file "${TRAMPOLINE_SERVICE_ACCOUNT}"
+ log_yellow "Configuring Container Registry access"
+ gcloud auth configure-docker --quiet
+fi
+
+required_envvars=(
+ # The basic trampoline configurations.
+ "TRAMPOLINE_IMAGE"
+ "TRAMPOLINE_BUILD_FILE"
+)
+
+if [[ -f "${PROJECT_ROOT}/.trampolinerc" ]]; then
+ source "${PROJECT_ROOT}/.trampolinerc"
+fi
+
+log_yellow "Checking environment variables."
+for e in "${required_envvars[@]}"
+do
+ if [[ -z "${!e:-}" ]]; then
+        log_red "Missing ${e} env var. Aborting."
+ exit 1
+ fi
+done
+
+# We want to support the legacy-style TRAMPOLINE_BUILD_FILE used with
+# the V1 script, e.g. "github/repo-name/.kokoro/run_tests.sh".
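+# The parameter expansion below strips that "github/<repo>/" prefix,
+# leaving ".kokoro/run_tests.sh".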
+TRAMPOLINE_BUILD_FILE="${TRAMPOLINE_BUILD_FILE#github/*/}"
+log_yellow "Using TRAMPOLINE_BUILD_FILE: ${TRAMPOLINE_BUILD_FILE}"
+
+# Ignore errors from docker operations and test execution.
+set +e
+
+log_yellow "Preparing Docker image."
+# We only download the docker image in CI builds.
+if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then
+ # Download the docker image specified by `TRAMPOLINE_IMAGE`
+
+ # We may want to add --max-concurrent-downloads flag.
+
+ log_yellow "Start pulling the Docker image: ${TRAMPOLINE_IMAGE}."
+ if docker pull "${TRAMPOLINE_IMAGE}"; then
+ log_green "Finished pulling the Docker image: ${TRAMPOLINE_IMAGE}."
+ has_image="true"
+ else
+ log_red "Failed pulling the Docker image: ${TRAMPOLINE_IMAGE}."
+ has_image="false"
+ fi
+else
+    # For local runs, check whether we already have the image.
+ if docker images "${TRAMPOLINE_IMAGE}:latest" | grep "${TRAMPOLINE_IMAGE}"; then
+ has_image="true"
+ else
+ has_image="false"
+ fi
+fi
+
+
+# The default user for a Docker container has uid 0 (root). To avoid
+# creating root-owned files in the build directory, we tell docker to
+# use the current user ID.
+user_uid="$(id -u)"
+user_gid="$(id -g)"
+user_name="$(id -un)"
+
+# To allow docker in docker, we look up the docker group's gid on the
+# host OS and run the container with it.
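+# (This assumes a "docker" group exists on the host; docker_gid will
+# be empty otherwise.)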
+docker_gid=$(cut -d: -f3 < <(getent group docker))
+
+update_cache="false"
+if [[ "${TRAMPOLINE_DOCKERFILE:-none}" != "none" ]]; then
+ # Build the Docker image from the source.
+ context_dir=$(dirname "${TRAMPOLINE_DOCKERFILE}")
+ docker_build_flags=(
+ "-f" "${TRAMPOLINE_DOCKERFILE}"
+ "-t" "${TRAMPOLINE_IMAGE}"
+ "--build-arg" "UID=${user_uid}"
+ "--build-arg" "USERNAME=${user_name}"
+ )
+ if [[ "${has_image}" == "true" ]]; then
+ docker_build_flags+=("--cache-from" "${TRAMPOLINE_IMAGE}")
+ fi
+
+ log_yellow "Start building the docker image."
+ if [[ "${TRAMPOLINE_VERBOSE:-false}" == "true" ]]; then
+ echo "docker build" "${docker_build_flags[@]}" "${context_dir}"
+ fi
+
+    # On CI systems, we want to suppress docker build logs and only
+    # output them when the build fails.
+ if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then
+ if docker build "${docker_build_flags[@]}" "${context_dir}" \
+ > "${tmpdir}/docker_build.log" 2>&1; then
+ if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then
+ cat "${tmpdir}/docker_build.log"
+ fi
+
+ log_green "Finished building the docker image."
+ update_cache="true"
+ else
+ log_red "Failed to build the Docker image, aborting."
+ log_yellow "Dumping the build logs:"
+ cat "${tmpdir}/docker_build.log"
+ exit 1
+ fi
+ else
+ if docker build "${docker_build_flags[@]}" "${context_dir}"; then
+ log_green "Finished building the docker image."
+ update_cache="true"
+ else
+ log_red "Failed to build the Docker image, aborting."
+ exit 1
+ fi
+ fi
+else
+ if [[ "${has_image}" != "true" ]]; then
+ log_red "We do not have ${TRAMPOLINE_IMAGE} locally, aborting."
+ exit 1
+ fi
+fi
+
+# We use an array for the flags so they are easier to document.
+docker_flags=(
+    # Remove the container after it exits.
+ "--rm"
+
+ # Use the host network.
+ "--network=host"
+
+    # Run in privileged mode. We are not using docker for sandboxing or
+    # isolation, just for packaging our dev tools.
+ "--privileged"
+
+    # Run the container with the current user id. Because the container gets
+    # to write in ${PWD}, you typically want this to be your own user id.
+    # To allow docker in docker, we need to use the docker gid on the host.
+ "--user" "${user_uid}:${docker_gid}"
+
+ # Pass down the USER.
+ "--env" "USER=${user_name}"
+
+ # Mount the project directory inside the Docker container.
+ "--volume" "${PROJECT_ROOT}:${TRAMPOLINE_WORKSPACE}"
+ "--workdir" "${TRAMPOLINE_WORKSPACE}"
+ "--env" "PROJECT_ROOT=${TRAMPOLINE_WORKSPACE}"
+
+ # Mount the temporary home directory.
+ "--volume" "${tmphome}:/h"
+ "--env" "HOME=/h"
+
+ # Allow docker in docker.
+ "--volume" "/var/run/docker.sock:/var/run/docker.sock"
+
+    # Mount /tmp so that docker in docker can mount files there
+    # correctly.
+ "--volume" "/tmp:/tmp"
+ # Pass down the KOKORO_GFILE_DIR and KOKORO_KEYSTORE_DIR
+ # TODO(tmatsuo): This part is not portable.
+ "--env" "TRAMPOLINE_SECRET_DIR=/secrets"
+ "--volume" "${KOKORO_GFILE_DIR:-/dev/shm}:/secrets/gfile"
+ "--env" "KOKORO_GFILE_DIR=/secrets/gfile"
+ "--volume" "${KOKORO_KEYSTORE_DIR:-/dev/shm}:/secrets/keystore"
+ "--env" "KOKORO_KEYSTORE_DIR=/secrets/keystore"
+)
+
+# Add an option for nicer output if the build gets a tty.
+if [[ -t 0 ]]; then
+ docker_flags+=("-it")
+fi
+
+# Passing down env vars
+for e in "${pass_down_envvars[@]}"
+do
+ if [[ -n "${!e:-}" ]]; then
+ docker_flags+=("--env" "${e}=${!e}")
+ fi
+done
+
+# If arguments are given, they become the command run in the
+# container; otherwise, run TRAMPOLINE_BUILD_FILE.
+if [[ $# -ge 1 ]]; then
+ log_yellow "Running the given commands '" "${@:1}" "' in the container."
+ readonly commands=("${@:1}")
+ if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then
+ echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}"
+ fi
+ docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}"
+else
+ log_yellow "Running the tests in a Docker container."
+ docker_flags+=("--entrypoint=${TRAMPOLINE_BUILD_FILE}")
+ if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then
+ echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}"
+ fi
+ docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}"
+fi
+
+
+test_retval=$?
+
+if [[ ${test_retval} -eq 0 ]]; then
+    log_green "Build finished with exit code ${test_retval}"
+else
+    log_red "Build finished with exit code ${test_retval}"
+fi
+
+# Only upload the image when the tests pass.
+if [[ "${update_cache}" == "true" ]] && \
+ [[ $test_retval == 0 ]] && \
+ [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]]; then
+ log_yellow "Uploading the Docker image."
+ if docker push "${TRAMPOLINE_IMAGE}"; then
+ log_green "Finished uploading the Docker image."
+ else
+ log_red "Failed uploading the Docker image."
+ fi
+ # Call trampoline_after_upload_hook if it's defined.
+ if function_exists trampoline_after_upload_hook; then
+ trampoline_after_upload_hook
+ fi
+
+fi
+
+exit "${test_retval}"
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 00000000..1d74695f
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,31 @@
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# See https://pre-commit.com for more information
+# See https://pre-commit.com/hooks.html for more hooks
+repos:
+- repo: https://github.com/pre-commit/pre-commit-hooks
+ rev: v4.0.1
+ hooks:
+ - id: trailing-whitespace
+ - id: end-of-file-fixer
+ - id: check-yaml
+- repo: https://github.com/psf/black
+ rev: 23.7.0
+ hooks:
+ - id: black
+- repo: https://github.com/pycqa/flake8
+ rev: 6.1.0
+ hooks:
+ - id: flake8
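+
+# To run these hooks locally (assuming the pre-commit tool is
+# installed), set up the git hook and check the whole repo with:
+#   pre-commit install
+#   pre-commit run --all-files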
diff --git a/.repo-metadata.json b/.repo-metadata.json
index 15dd87dd..0f0abd93 100644
--- a/.repo-metadata.json
+++ b/.repo-metadata.json
@@ -1,9 +1,12 @@
{
- "name": "google-api-core",
- "name_pretty": "Google API client core library",
- "client_documentation": "https://googleapis.dev/python/google-api-core/latest",
- "release_level": "ga",
- "language": "python",
- "repo": "googleapis/python-api-core",
- "distribution_name": "google-api-core"
-}
\ No newline at end of file
+ "name": "google-api-core",
+ "name_pretty": "Google API client core library",
+ "client_documentation": "https://googleapis.dev/python/google-api-core/latest",
+ "release_level": "stable",
+ "language": "python",
+ "library_type": "CORE",
+ "repo": "googleapis/python-api-core",
+ "distribution_name": "google-api-core",
+ "default_version": "",
+ "codeowner_team": "@googleapis/actools-python"
+}
diff --git a/.trampolinerc b/.trampolinerc
new file mode 100644
index 00000000..00801523
--- /dev/null
+++ b/.trampolinerc
@@ -0,0 +1,61 @@
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Add required env vars here.
+required_envvars+=(
+)
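+# For example (hypothetical variable name):
+#   required_envvars+=("MY_REQUIRED_SECRET")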
+
+# Add env vars which are passed down into the container here.
+pass_down_envvars+=(
+ "NOX_SESSION"
+ ###############
+ # Docs builds
+ ###############
+ "STAGING_BUCKET"
+ "V2_STAGING_BUCKET"
+ ##################
+ # Samples builds
+ ##################
+ "INSTALL_LIBRARY_FROM_SOURCE"
+ "RUN_TESTS_SESSION"
+ "BUILD_SPECIFIC_GCLOUD_PROJECT"
+ # Target directories.
+ "RUN_TESTS_DIRS"
+)
+
+# Prevent unintentional override on the default image.
+if [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]] && \
+ [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then
+ echo "Please set TRAMPOLINE_IMAGE if you want to upload the Docker image."
+ exit 1
+fi
+
+# Define default values where it makes sense.
+if [[ -z "${TRAMPOLINE_IMAGE_UPLOAD:-}" ]]; then
+ TRAMPOLINE_IMAGE_UPLOAD=""
+fi
+
+if [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then
+ TRAMPOLINE_IMAGE=""
+fi
+
+if [[ -z "${TRAMPOLINE_DOCKERFILE:-}" ]]; then
+ TRAMPOLINE_DOCKERFILE=""
+fi
+
+if [[ -z "${TRAMPOLINE_BUILD_FILE:-}" ]]; then
+ TRAMPOLINE_BUILD_FILE=""
+fi
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 79ee2556..98a6b8d7 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,7 +4,642 @@
[1]: https://pypi.org/project/google-api-core/#history
-### [1.22.1](https://www.github.com/googleapis/python-api-core/compare/v1.22.0...v1.22.1) (2020-08-12)
+## [2.25.0](https://github.com/googleapis/python-api-core/compare/v2.24.2...v2.25.0) (2025-05-06)
+
+
+### Features
+
+* Add protobuf runtime version to `x-goog-api-client` header ([#812](https://github.com/googleapis/python-api-core/issues/812)) ([118bd96](https://github.com/googleapis/python-api-core/commit/118bd96f3907234351972409834ab5309cdfcee4))
+* Support dynamic retry backoff values ([#793](https://github.com/googleapis/python-api-core/issues/793)) ([70697a3](https://github.com/googleapis/python-api-core/commit/70697a3e39c389768e724fddacb3c9b97d609384))
+
+
+### Bug Fixes
+
+* Resolve issue where pre-release versions of dependencies are installed ([#808](https://github.com/googleapis/python-api-core/issues/808)) ([1ca7973](https://github.com/googleapis/python-api-core/commit/1ca7973a395099403be1a99c7c4583a8f22d5d8e))
+
+## [2.24.2](https://github.com/googleapis/python-api-core/compare/v2.24.1...v2.24.2) (2025-03-06)
+
+
+### Bug Fixes
+
+* **deps:** Allow protobuf 6.x ([#804](https://github.com/googleapis/python-api-core/issues/804)) ([687be7c](https://github.com/googleapis/python-api-core/commit/687be7cbf629a61feb43ef37d3d920fa32b2d636))
+
+## [2.24.1](https://github.com/googleapis/python-api-core/compare/v2.24.0...v2.24.1) (2025-01-24)
+
+
+### Bug Fixes
+
+* Memory leak in bidi classes ([#770](https://github.com/googleapis/python-api-core/issues/770)) ([c1b8afa](https://github.com/googleapis/python-api-core/commit/c1b8afa4e2abe256e70651defccdc285f104ed19))
+* Resolve the issue where rpc timeout of 0 is used when timeout expires ([#776](https://github.com/googleapis/python-api-core/issues/776)) ([a5604a5](https://github.com/googleapis/python-api-core/commit/a5604a55070c6d92618d078191bf99f4c168d5f6))
+
+
+### Documentation
+
+* Add warnings regarding consuming externally sourced credentials ([#783](https://github.com/googleapis/python-api-core/issues/783)) ([0ec1825](https://github.com/googleapis/python-api-core/commit/0ec18254b90721684679a98bcacef4615467a227))
+
+## [2.24.0](https://github.com/googleapis/python-api-core/compare/v2.23.0...v2.24.0) (2024-12-06)
+
+
+### Features
+
+* Add automatic logging config to support debug logging ([#754](https://github.com/googleapis/python-api-core/issues/754)) ([d18d9b5](https://github.com/googleapis/python-api-core/commit/d18d9b5131162b44eebcc0859a7aca1198a2ac06))
+* Update recognized logging fields ([#766](https://github.com/googleapis/python-api-core/issues/766)) ([5f80f77](https://github.com/googleapis/python-api-core/commit/5f80f778bc25d878b3187c6138077ad8c6bcd35f))
+
+## [2.23.0](https://github.com/googleapis/python-api-core/compare/v2.22.0...v2.23.0) (2024-11-11)
+
+
+### Features
+
+* Migrate to pyproject.toml ([#736](https://github.com/googleapis/python-api-core/issues/736)) ([159e9a4](https://github.com/googleapis/python-api-core/commit/159e9a49525937f18a55c38136aae32575424d55))
+
+## [2.22.0](https://github.com/googleapis/python-api-core/compare/v2.21.0...v2.22.0) (2024-10-25)
+
+
+### Features
+
+* Add support for python 3.13 ([#696](https://github.com/googleapis/python-api-core/issues/696)) ([46b3d3a](https://github.com/googleapis/python-api-core/commit/46b3d3abaa1bae28e9d788d7c3006224cd6f74d5))
+
+
+### Bug Fixes
+
+* Add type hints to ClientOptions ([#735](https://github.com/googleapis/python-api-core/issues/735)) ([b91ed19](https://github.com/googleapis/python-api-core/commit/b91ed19210148dfa49ec790c4dd5f4a7bff80954))
+* Improve `Any` decode error ([#712](https://github.com/googleapis/python-api-core/issues/712)) ([0d5ed37](https://github.com/googleapis/python-api-core/commit/0d5ed37c96f9b40bccae98e228163a88abeb1763))
+* Require proto-plus >= 1.25.0 for Python 3.13 ([#740](https://github.com/googleapis/python-api-core/issues/740)) ([a26313e](https://github.com/googleapis/python-api-core/commit/a26313e1cb12e44aa498f12622edccc0c83ba0c3))
+* Switch to unittest.mock from mock ([#713](https://github.com/googleapis/python-api-core/issues/713)) ([8c53381](https://github.com/googleapis/python-api-core/commit/8c533819b7e212aa2f1d695a7ce08629f4fb2daf))
+
+## [2.21.0](https://github.com/googleapis/python-api-core/compare/v2.20.0...v2.21.0) (2024-10-07)
+
+
+### Features
+
+* Add support for asynchronous long running operations ([#724](https://github.com/googleapis/python-api-core/issues/724)) ([aaed69b](https://github.com/googleapis/python-api-core/commit/aaed69b6f1d694cd7e561e2aa03fdd8d6cfb369a))
+
+
+### Bug Fixes
+
+* Set chunk size for async stream content ([#702](https://github.com/googleapis/python-api-core/issues/702)) ([45b8a6d](https://github.com/googleapis/python-api-core/commit/45b8a6db5a5c75acdd8be896d0152f11608c7e51))
+
+## [2.20.0](https://github.com/googleapis/python-api-core/compare/v2.19.2...v2.20.0) (2024-09-18)
+
+
+### Features
+
+* Add async unsupported parameter exception ([#694](https://github.com/googleapis/python-api-core/issues/694)) ([8c137fe](https://github.com/googleapis/python-api-core/commit/8c137feb6e880fdd93d1248d9b6c10002dc3c096))
+* Add support for asynchronous rest streaming ([#686](https://github.com/googleapis/python-api-core/issues/686)) ([1b7bb6d](https://github.com/googleapis/python-api-core/commit/1b7bb6d1b721e4ee1561e8e4a347846d7fdd7c27))
+* Add support for creating exceptions from an asynchronous response ([#688](https://github.com/googleapis/python-api-core/issues/688)) ([1c4b0d0](https://github.com/googleapis/python-api-core/commit/1c4b0d079f2103a7b5562371a7bd1ada92528de3))
+
+## [2.19.2](https://github.com/googleapis/python-api-core/compare/v2.19.1...v2.19.2) (2024-08-16)
+
+
+### Bug Fixes
+
+* Fail gracefully if could not import `rpc_status` module ([#680](https://github.com/googleapis/python-api-core/issues/680)) ([7ccbf57](https://github.com/googleapis/python-api-core/commit/7ccbf5738fa236649f9a155055c71789362b5c4c))
+
+## [2.19.1](https://github.com/googleapis/python-api-core/compare/v2.19.0...v2.19.1) (2024-06-19)
+
+
+### Bug Fixes
+
+* Add support for protobuf 5.x ([#644](https://github.com/googleapis/python-api-core/issues/644)) ([fda0ca6](https://github.com/googleapis/python-api-core/commit/fda0ca6f0664ac5044671591ed62618175a7393f))
+* Ignore unknown fields in rest streaming. ([#651](https://github.com/googleapis/python-api-core/issues/651)) ([1203fb9](https://github.com/googleapis/python-api-core/commit/1203fb97d2685535f89113e944c4764c1deb595e))
+
+## [2.19.0](https://github.com/googleapis/python-api-core/compare/v2.18.0...v2.19.0) (2024-04-29)
+
+
+### Features
+
+* Add google.api_core.version_header ([#638](https://github.com/googleapis/python-api-core/issues/638)) ([a7b53e9](https://github.com/googleapis/python-api-core/commit/a7b53e9e9a7deb88baf92a2827958429e3677069))
+
+## [2.18.0](https://github.com/googleapis/python-api-core/compare/v2.17.1...v2.18.0) (2024-03-20)
+
+
+### Features
+
+* Add common logic for supporting universe domain ([#621](https://github.com/googleapis/python-api-core/issues/621)) ([94f2ca3](https://github.com/googleapis/python-api-core/commit/94f2ca3b4d094e6e10154634d3463d07ebea2035))
+
+
+### Bug Fixes
+
+* Add _registered_method to grpc ChannelStub ([#614](https://github.com/googleapis/python-api-core/issues/614)) ([5eaaea8](https://github.com/googleapis/python-api-core/commit/5eaaea8a989f8bdbdb5fbc95a155a20837c87f42))
+* **deps:** Require proto-plus >= 1.22.3 ([#626](https://github.com/googleapis/python-api-core/issues/626)) ([4fed37c](https://github.com/googleapis/python-api-core/commit/4fed37cbc32122f156e38250b5fa8b2b08a787a1))
+
+## [2.17.1](https://github.com/googleapis/python-api-core/compare/v2.17.0...v2.17.1) (2024-02-13)
+
+
+### Bug Fixes
+
+* Resolve issue handling protobuf responses in rest streaming ([#604](https://github.com/googleapis/python-api-core/issues/604)) ([bcebc92](https://github.com/googleapis/python-api-core/commit/bcebc92eca69dae81c5e546d526c92b164a6b3b4))
+
+## [2.17.0](https://github.com/googleapis/python-api-core/compare/v2.16.2...v2.17.0) (2024-02-06)
+
+
+### Features
+
+* Add attempt_direct_path argument to create_channel ([#583](https://github.com/googleapis/python-api-core/issues/583)) ([94726e7](https://github.com/googleapis/python-api-core/commit/94726e739698035b00667983f854c600252abd28))
+
+
+### Bug Fixes
+
+* Retry constructors methods support None ([#592](https://github.com/googleapis/python-api-core/issues/592)) ([416203c](https://github.com/googleapis/python-api-core/commit/416203c1888934670bfeccafe5f5469f87314512))
+
+## [2.16.2](https://github.com/googleapis/python-api-core/compare/v2.16.1...v2.16.2) (2024-02-02)
+
+
+### Bug Fixes
+
+* Spelling error `a,out` -> `amount` ([#596](https://github.com/googleapis/python-api-core/issues/596)) ([88688b1](https://github.com/googleapis/python-api-core/commit/88688b1625c4dab0df6124a0560f550eb322500f))
+
+## [2.16.1](https://github.com/googleapis/python-api-core/compare/v2.16.0...v2.16.1) (2024-01-30)
+
+
+### Bug Fixes
+
+* Fix broken import for google.api_core.retry_async.AsyncRetry ([#587](https://github.com/googleapis/python-api-core/issues/587)) ([ac012c0](https://github.com/googleapis/python-api-core/commit/ac012c04c69b8bbe72962f0d0d9e9536c0b4a524))
+
+## [2.16.0](https://github.com/googleapis/python-api-core/compare/v2.15.0...v2.16.0) (2024-01-29)
+
+
+### Features
+
+* Retry and retry_async support streaming rpcs ([#495](https://github.com/googleapis/python-api-core/issues/495)) ([17ff5f1](https://github.com/googleapis/python-api-core/commit/17ff5f1d83a9a6f50a0226fb0e794634bd584f17))
+
+## [2.15.0](https://github.com/googleapis/python-api-core/compare/v2.14.0...v2.15.0) (2023-12-07)
+
+
+### Features
+
+* Add support for Python 3.12 ([#557](https://github.com/googleapis/python-api-core/issues/557)) ([091b4f1](https://github.com/googleapis/python-api-core/commit/091b4f1c7fcc59c3f2a02ee44fd3c30b78423f12))
+* Add type annotations to wrapped grpc calls ([#554](https://github.com/googleapis/python-api-core/issues/554)) ([fc12b40](https://github.com/googleapis/python-api-core/commit/fc12b40bfc6e0c4bb313196e2e3a9c9374ce1c45))
+* Add universe_domain argument to ClientOptions ([3069ef4](https://github.com/googleapis/python-api-core/commit/3069ef4b9123ddb64841cbb7bbb183b53d502e0a))
+* Introduce compatibility with native namespace packages ([#561](https://github.com/googleapis/python-api-core/issues/561)) ([bd82827](https://github.com/googleapis/python-api-core/commit/bd82827108f1eeb6c05cfacf6c044b2afacc18a2))
+
+
+### Bug Fixes
+
+* Fix regression in `bidi` causing `Thread-ConsumeBidirectionalStream caught unexpected exception and will exit` ([#562](https://github.com/googleapis/python-api-core/issues/562)) ([40c8ae0](https://github.com/googleapis/python-api-core/commit/40c8ae0cf1f797e31e106461164e22db4fb2d3d9))
+* Replace deprecated `datetime.datetime.utcnow()` ([#552](https://github.com/googleapis/python-api-core/issues/552)) ([448923a](https://github.com/googleapis/python-api-core/commit/448923acf277a70e8704c949311bf4feaef8cab6)), closes [#540](https://github.com/googleapis/python-api-core/issues/540)
+
+## [2.14.0](https://github.com/googleapis/python-api-core/compare/v2.13.1...v2.14.0) (2023-11-09)
+
+
+### Features
+
+* Support with_call for wrapped rpcs ([#550](https://github.com/googleapis/python-api-core/issues/550)) ([01a57a7](https://github.com/googleapis/python-api-core/commit/01a57a745f4c8345c9c93412c27dd416b49f5953))
+
+## [2.13.1](https://github.com/googleapis/python-api-core/compare/v2.13.0...v2.13.1) (2023-11-09)
+
+
+### Bug Fixes
+
+* Update async client to use async retry ([#544](https://github.com/googleapis/python-api-core/issues/544)) ([f21bb32](https://github.com/googleapis/python-api-core/commit/f21bb32b8e6310116a642a6e6b6dd8e44e30e656))
+
+## [2.13.0](https://github.com/googleapis/python-api-core/compare/v2.12.0...v2.13.0) (2023-11-03)
+
+
+### Features
+
+* Add caching to routing header calculation ([#526](https://github.com/googleapis/python-api-core/issues/526)) ([6251eab](https://github.com/googleapis/python-api-core/commit/6251eab3fca5f7e509cb9b6e476ce1184094b711))
+
+
+### Bug Fixes
+
+* Add warning to retry target to avoid incorrect usage ([#543](https://github.com/googleapis/python-api-core/issues/543)) ([bfb40e6](https://github.com/googleapis/python-api-core/commit/bfb40e6929ef47be7a6464d2f1e0d06595736b8d))
+* Drop usage of distutils ([#541](https://github.com/googleapis/python-api-core/issues/541)) ([4bd9e10](https://github.com/googleapis/python-api-core/commit/4bd9e10f20eea227c88e3e1496010cca6dd8a270))
+* Ensure exception is available when BackgroundConsumer open stream fails ([#357](https://github.com/googleapis/python-api-core/issues/357)) ([405272c](https://github.com/googleapis/python-api-core/commit/405272c05f8c6d20e242c6172b01f78f0fd3bf32))
+
+## [2.12.0](https://github.com/googleapis/python-api-core/compare/v2.11.1...v2.12.0) (2023-09-07)
+
+
+### Features
+
+* Add a little bit of typing to google.api_core.retry ([#453](https://github.com/googleapis/python-api-core/issues/453)) ([2477ab9](https://github.com/googleapis/python-api-core/commit/2477ab9ea5c2e863a493fb7ebebaa429a44ea096))
+* Add grpc Compression argument to channels and methods ([#451](https://github.com/googleapis/python-api-core/issues/451)) ([bdebd63](https://github.com/googleapis/python-api-core/commit/bdebd6331f9c0d3d1a8ceaf274f07d2ed75bfe92))
+
+
+### Documentation
+
+* Fix a typo in google/api_core/page_iterator.py ([#511](https://github.com/googleapis/python-api-core/issues/511)) ([c0ce73c](https://github.com/googleapis/python-api-core/commit/c0ce73c4de53ad694fe36d17408998aa1230398f))
+
+## [2.11.1](https://github.com/googleapis/python-api-core/compare/v2.11.0...v2.11.1) (2023-06-12)
+
+
+### Bug Fixes
+
+* Add actionable errors for GCE long running operations ([#498](https://github.com/googleapis/python-api-core/issues/498)) ([7dfc3a7](https://github.com/googleapis/python-api-core/commit/7dfc3a7a439243f05238a11b68a31720fde1769e))
+* Invalid `dev` version identifiers in `setup.py` ([#505](https://github.com/googleapis/python-api-core/issues/505)) ([8844edb](https://github.com/googleapis/python-api-core/commit/8844edb1e802040810918a12bc9ff89104da38d4))
+
+## [2.11.0](https://github.com/googleapis/python-api-core/compare/v2.10.2...v2.11.0) (2022-11-10)
+
+
+### Features
+
+* Add support for Python 3.11 ([#466](https://github.com/googleapis/python-api-core/issues/466)) ([ff379e3](https://github.com/googleapis/python-api-core/commit/ff379e304c353bcab734e1c4706b74b356a1e932))
+* Allow representing enums with their unqualified symbolic names in headers ([#465](https://github.com/googleapis/python-api-core/issues/465)) ([522b98e](https://github.com/googleapis/python-api-core/commit/522b98ecc1ebd1c2280d3d7c73a02f6e4fb528d4))
+
+
+### Bug Fixes
+
+* Major refactoring of Polling, Retry and Timeout logic ([#462](https://github.com/googleapis/python-api-core/issues/462)) ([434253d](https://github.com/googleapis/python-api-core/commit/434253de16d9efdf984ddb64c409706cda1d5f82))
+* Require google-auth >= 2.14.1 ([#463](https://github.com/googleapis/python-api-core/issues/463)) ([7cc329f](https://github.com/googleapis/python-api-core/commit/7cc329fe1498b0a4285123448e4ea80c6a780d47))
+
+## [2.10.2](https://github.com/googleapis/python-api-core/compare/v2.10.1...v2.10.2) (2022-10-08)
+
+
+### Bug Fixes
+
+* **deps:** Allow protobuf 3.19.5 ([#459](https://github.com/googleapis/python-api-core/issues/459)) ([e949364](https://github.com/googleapis/python-api-core/commit/e949364ce3a2c4c3cdb2658054d4793aa942d999))
+
+## [2.10.1](https://github.com/googleapis/python-api-core/compare/v2.10.0...v2.10.1) (2022-09-14)
+
+
+### Bug Fixes
+
+* Improve transcoding error message ([#442](https://github.com/googleapis/python-api-core/issues/442)) ([538df80](https://github.com/googleapis/python-api-core/commit/538df80ed6d21f43b512a73853935f7a7b9bdf52))
+
+## [2.10.0](https://github.com/googleapis/python-api-core/compare/v2.9.0...v2.10.0) (2022-09-02)
+
+
+### Features
+
+* Add 'strict' to flatten_query_params to lower-case bools ([#433](https://github.com/googleapis/python-api-core/issues/433)) ([83678e9](https://github.com/googleapis/python-api-core/commit/83678e94e1081f9087b19c43f26fad4774184d66))
+
+## [2.9.0](https://github.com/googleapis/python-api-core/compare/v2.8.2...v2.9.0) (2022-09-01)
+
+
+### Features
+
+* Make grpc transcode logic work in terms of protobuf python objects ([#428](https://github.com/googleapis/python-api-core/issues/428)) ([c3ad8ea](https://github.com/googleapis/python-api-core/commit/c3ad8ea67447e3d8a1154d7a9221e116f60d425a))
+
+
+### Bug Fixes
+
+* Require python 3.7+ ([#410](https://github.com/googleapis/python-api-core/issues/410)) ([7ddb8c0](https://github.com/googleapis/python-api-core/commit/7ddb8c00e6be7ab6905a9a802ad1c3063fbfa46c))
+* Restore support for grpcio-gcp ([#418](https://github.com/googleapis/python-api-core/issues/418)) ([8c19609](https://github.com/googleapis/python-api-core/commit/8c19609d6244930bd91fd5f40ef9b5b65584c4a5))
+
+## [2.8.2](https://github.com/googleapis/python-api-core/compare/v2.8.1...v2.8.2) (2022-06-13)
+
+
+### Bug Fixes
+
+* **deps:** allow protobuf < 5.0.0 ([#400](https://github.com/googleapis/python-api-core/issues/400)) ([8f73d2e](https://github.com/googleapis/python-api-core/commit/8f73d2ee2d3af2201f877aa7e2f7361147759dc7))
+* drop support for grpc-gcp ([#401](https://github.com/googleapis/python-api-core/issues/401)) ([5da6733](https://github.com/googleapis/python-api-core/commit/5da6733a475c436efc11b14889af73b3a0e20379))
+
+
+### Documentation
+
+* fix changelog header to consistent size ([#394](https://github.com/googleapis/python-api-core/issues/394)) ([ac266e9](https://github.com/googleapis/python-api-core/commit/ac266e935bc4e7c6dff250384407e7a60d8dba90))
+* Fix typo in the BackgroundConsumer docstring ([#395](https://github.com/googleapis/python-api-core/issues/395)) ([0eb727f](https://github.com/googleapis/python-api-core/commit/0eb727f92314db3c4383754514f75a49ba02e27b))
+
+## [2.8.1](https://github.com/googleapis/python-api-core/compare/v2.8.0...v2.8.1) (2022-05-26)
+
+
+### Bug Fixes
+
+* **deps:** require googleapis-common-protos >= 1.56.2 ([d84d66c](https://github.com/googleapis/python-api-core/commit/d84d66c2a4107f5f9a20c53e870a27fb1250ea3d))
+* **deps:** require protobuf>= 3.15.0, <4.0.0dev ([#385](https://github.com/googleapis/python-api-core/issues/385)) ([d84d66c](https://github.com/googleapis/python-api-core/commit/d84d66c2a4107f5f9a20c53e870a27fb1250ea3d))
+
+## [2.8.0](https://github.com/googleapis/python-api-core/compare/v2.7.3...v2.8.0) (2022-05-18)
+
+
+### Features
+
+* adds support for audience in client_options ([#379](https://github.com/googleapis/python-api-core/issues/379)) ([c97c498](https://github.com/googleapis/python-api-core/commit/c97c4980125a86f384cdf12720df7bb1a2adf9d2))
+* adds support for audience in client_options. ([c97c498](https://github.com/googleapis/python-api-core/commit/c97c4980125a86f384cdf12720df7bb1a2adf9d2))
+
+## [2.7.3](https://github.com/googleapis/python-api-core/compare/v2.7.2...v2.7.3) (2022-04-29)
+
+
+### Bug Fixes
+
+* Avoid AttributeError if grpcio-status is not installed ([#370](https://github.com/googleapis/python-api-core/issues/370)) ([022add1](https://github.com/googleapis/python-api-core/commit/022add16266f9c07f0f88eea13472cc2e0bfc991))
+
+## [2.7.2](https://github.com/googleapis/python-api-core/compare/v2.7.1...v2.7.2) (2022-04-13)
+
+
+### Bug Fixes
+
+* allow grpc without grpcio-status ([#355](https://github.com/googleapis/python-api-core/issues/355)) ([112049e](https://github.com/googleapis/python-api-core/commit/112049e79f5a5b0a989d85d438a1bd29485f46f7))
+* remove dependency on pkg_resources ([#361](https://github.com/googleapis/python-api-core/issues/361)) ([523dbd0](https://github.com/googleapis/python-api-core/commit/523dbd0b10d37ffcf83fa751f0bad313f162abf1))
+
+## [2.7.1](https://github.com/googleapis/python-api-core/compare/v2.7.0...v2.7.1) (2022-03-09)
+
+
+### Bug Fixes
+
+* add more context to error message. ([#340](https://github.com/googleapis/python-api-core/issues/340)) ([0680fb4](https://github.com/googleapis/python-api-core/commit/0680fb4d3e013fe2de27e0a2ae2cd9896479e596))
+
+## [2.7.0](https://github.com/googleapis/python-api-core/compare/v2.6.1...v2.7.0) (2022-03-08)
+
+
+### Features
+
+* expose extra fields in ExtendedOperation ([#351](https://github.com/googleapis/python-api-core/issues/351)) ([9abc6f4](https://github.com/googleapis/python-api-core/commit/9abc6f48f23c87b9771dca3c96b4f6af39620a50))
+
+## [2.6.1](https://github.com/googleapis/python-api-core/compare/v2.6.0...v2.6.1) (2022-03-05)
+
+
+### Bug Fixes
+
+* Remove py2 tag from wheel ([#343](https://github.com/googleapis/python-api-core/issues/343)) ([7e21e9e](https://github.com/googleapis/python-api-core/commit/7e21e9e34892472a34f9b44175fa761f0e3fd9ed))
+
+## [2.6.0](https://github.com/googleapis/python-api-core/compare/v2.5.0...v2.6.0) (2022-03-03)
+
+
+### Features
+
+* initial support for Extended Operations ([#344](https://github.com/googleapis/python-api-core/issues/344)) ([021bb7d](https://github.com/googleapis/python-api-core/commit/021bb7d5bf0a1d8ac58dbf0c738fac309135ba7d))
+
+## [2.5.0](https://github.com/googleapis/python-api-core/compare/v2.4.0...v2.5.0) (2022-02-02)
+
+
+### Features
+
+* add api_key to client options ([#248](https://github.com/googleapis/python-api-core/issues/248)) ([5e5ad37](https://github.com/googleapis/python-api-core/commit/5e5ad37b8161109d65b0fab43636f7424e570fa3))
+
+
+### Bug Fixes
+
+* **deps:** remove setuptools from dependencies ([#339](https://github.com/googleapis/python-api-core/issues/339)) ([c782f29](https://github.com/googleapis/python-api-core/commit/c782f294b50b078f01959627fb82aa4c5efec333))
+
+
+### Documentation
+
+* fix typo in library name ([#332](https://github.com/googleapis/python-api-core/issues/332)) ([f267111](https://github.com/googleapis/python-api-core/commit/f267111823545a6c67ef5f10b85cd8c2fab8a612))
+
+## [2.4.0](https://www.github.com/googleapis/python-api-core/compare/v2.3.2...v2.4.0) (2022-01-11)
+
+
+### Features
+
+* add support for 'error_info' ([#315](https://www.github.com/googleapis/python-api-core/issues/315)) ([cc46aa6](https://www.github.com/googleapis/python-api-core/commit/cc46aa68ec184871330d16a6c767f57a4f0eb633))
+* iterator for processing JSON responses in REST streaming. ([#317](https://www.github.com/googleapis/python-api-core/issues/317)) ([f9f2696](https://www.github.com/googleapis/python-api-core/commit/f9f26969842b456ea372bed941d712b7a9ab7239))
+
+## [2.3.2](https://www.github.com/googleapis/python-api-core/compare/v2.3.1...v2.3.2) (2021-12-16)
+
+
+### Bug Fixes
+
+* address broken wheels in version 2.3.1
+
+## [2.3.1](https://www.github.com/googleapis/python-api-core/compare/v2.3.0...v2.3.1) (2021-12-15)
+
+
+### Bug Fixes
+* exclude function target from retry deadline exceeded exception message ([#318](https://www.github.com/googleapis/python-api-core/issues/318)) ([34ebdcc](https://www.github.com/googleapis/python-api-core/commit/34ebdcc251d4f3d7d496e8e0b78847645a06650b))
+
+## [2.3.0](https://www.github.com/googleapis/python-api-core/compare/v2.2.2...v2.3.0) (2021-11-25)
+
+
+### Features
+
+* add operations rest client to support long-running operations. ([#311](https://www.github.com/googleapis/python-api-core/issues/311)) ([ce1adf3](https://www.github.com/googleapis/python-api-core/commit/ce1adf395982ede157c0f25a920946bb52789873))
+
+
+### Bug Fixes
+
+* handle bare 'grpc.Call' in 'from_grpc_error' ([#298](https://www.github.com/googleapis/python-api-core/issues/298)) ([060b339](https://www.github.com/googleapis/python-api-core/commit/060b339e3af296dd1772bfc1b4a0d2b4264cae1f))
+
+## [2.2.2](https://www.github.com/googleapis/python-api-core/compare/v2.2.1...v2.2.2) (2021-11-02)
+
+
+### Bug Fixes
+
+* make 'gapic_v1.method.DEFAULT' a typed object ([#292](https://www.github.com/googleapis/python-api-core/issues/292)) ([ffc51f0](https://www.github.com/googleapis/python-api-core/commit/ffc51f03c7ce5d9f009ba859b8df385d52925578))
+
+## [2.2.1](https://www.github.com/googleapis/python-api-core/compare/v2.2.0...v2.2.1) (2021-10-26)
+
+
+### Bug Fixes
+
+* revert "fix: do not error on LROs with no response or error" ([#294](https://www.github.com/googleapis/python-api-core/issues/294)) ([9e6091e](https://www.github.com/googleapis/python-api-core/commit/9e6091ee59a30e72a6278b369f6a08e7aef32f22))
+
+## [2.2.0](https://www.github.com/googleapis/python-api-core/compare/v2.1.1...v2.2.0) (2021-10-25)
+
+
+### Features
+
+* add 'GoogleAPICallError.error_details' property ([#286](https://www.github.com/googleapis/python-api-core/issues/286)) ([ef6f0fc](https://www.github.com/googleapis/python-api-core/commit/ef6f0fcfdfe771172056e35e3c990998b3b00416))
+
+## [2.1.1](https://www.github.com/googleapis/python-api-core/compare/v2.1.0...v2.1.1) (2021-10-13)
+
+
+### Bug Fixes
+
+* add mypy checking + 'py.typed' file ([#290](https://www.github.com/googleapis/python-api-core/issues/290)) ([0023ee1](https://www.github.com/googleapis/python-api-core/commit/0023ee1fe0e8b80c7a9e8987e0f322a829e5d613))
+
+## [2.1.0](https://www.github.com/googleapis/python-api-core/compare/v2.0.1...v2.1.0) (2021-10-05)
+
+
+### Features
+
+* add grpc transcoding + tests ([#259](https://www.github.com/googleapis/python-api-core/issues/259)) ([afe0fa1](https://www.github.com/googleapis/python-api-core/commit/afe0fa14c21289c8244606a9f81544cff8ac5f7c))
+* Add helper function to format query_params for rest transport. ([#275](https://www.github.com/googleapis/python-api-core/issues/275)) ([1c5eb4d](https://www.github.com/googleapis/python-api-core/commit/1c5eb4df93d78e791082d9282330ebf0faacd222))
+* add support for Python 3.10 ([#284](https://www.github.com/googleapis/python-api-core/issues/284)) ([a422a5d](https://www.github.com/googleapis/python-api-core/commit/a422a5d72cb6f363d57e7a4effe421ba8e049cde))
+
+## [2.0.1](https://www.github.com/googleapis/python-api-core/compare/v2.0.0...v2.0.1) (2021-08-31)
+
+
+### Bug Fixes
+
+* do not error on LROs with no response or error ([#258](https://www.github.com/googleapis/python-api-core/issues/258)) ([618f192](https://www.github.com/googleapis/python-api-core/commit/618f19201af729205892fcecd9c8e315ba3174a3))
+
+## [2.0.0](https://www.github.com/googleapis/python-api-core/compare/v2.0.0-b1...v2.0.0) (2021-08-18)
+
+### ⚠ BREAKING CHANGES
+
+* drop support for Python 2.7 / 3.5 ([#212](https://www.github.com/googleapis/python-api-core/issues/212)) ([a30f004](https://www.github.com/googleapis/python-api-core/commit/a30f004e74f709d46e905dd819c71f43354e9ac9))
+
+### Bug Fixes
+
+* bump grpcio version to use stable aio API ([#234](https://www.github.com/googleapis/python-api-core/issues/234)) ([bdbf889](https://www.github.com/googleapis/python-api-core/commit/bdbf889210b709d7c1945f2160bcba9161b4dd2e))
+* strip trailing _ from field mask paths ([#228](https://www.github.com/googleapis/python-api-core/issues/228)) ([ff6ef1b](https://www.github.com/googleapis/python-api-core/commit/ff6ef1bd07fa68307b7c82c910416d770e7b3416))
+
+## [2.0.0b1](https://www.github.com/googleapis/python-api-core/compare/v1.31.1...v2.0.0b1) (2021-08-03)
+
+
+### ⚠ BREAKING CHANGES
+
+* drop support for Python 2.7 / 3.5 ([#212](https://www.github.com/googleapis/python-api-core/issues/212)) ([a30f004](https://www.github.com/googleapis/python-api-core/commit/a30f004e74f709d46e905dd819c71f43354e9ac9))
+
+### Bug Fixes
+
+* strip trailing _ from field mask paths ([#228](https://www.github.com/googleapis/python-api-core/issues/228)) ([ff6ef1b](https://www.github.com/googleapis/python-api-core/commit/ff6ef1bd07fa68307b7c82c910416d770e7b3416))
+
+## [1.31.1](https://www.github.com/googleapis/python-api-core/compare/v1.31.0...v1.31.1) (2021-07-26)
+
+
+### Bug Fixes
+
+* add 'requests.exceptions.ChunkedEncodingError' to retryable exceptions ([#237](https://www.github.com/googleapis/python-api-core/issues/237)) ([5e540f2](https://www.github.com/googleapis/python-api-core/commit/5e540f28493cc3e13260458a8d1c6a1abb2ed313))
+
+
+### Documentation
+
+* add Samples section to CONTRIBUTING.rst ([#229](https://www.github.com/googleapis/python-api-core/issues/229)) ([a12c051](https://www.github.com/googleapis/python-api-core/commit/a12c0516c42918b05415835029717814353b883b))
+
+## [1.31.0](https://www.github.com/googleapis/python-api-core/compare/v1.30.0...v1.31.0) (2021-07-07)
+
+
+### Features
+
+* add ServiceUnavailable exception to polling retries ([#184](https://www.github.com/googleapis/python-api-core/issues/184)) ([11032cf](https://www.github.com/googleapis/python-api-core/commit/11032cf08ecc16dd252a6cda8b33b0b28ec4f4ba))
+
+
+### Bug Fixes
+
+* undeprecate entity factory helpers ([#101](https://www.github.com/googleapis/python-api-core/issues/101)) ([1fbee03](https://www.github.com/googleapis/python-api-core/commit/1fbee03495a136eef3d6aaa5ea0aadd6e4b58e8b)), closes [#100](https://www.github.com/googleapis/python-api-core/issues/100)
+
+## [1.30.0](https://www.github.com/googleapis/python-api-core/compare/v1.29.0...v1.30.0) (2021-06-08)
+
+
+### Features
+
+* add iterator capability to paged iterators ([#200](https://www.github.com/googleapis/python-api-core/issues/200)) ([3487d68](https://www.github.com/googleapis/python-api-core/commit/3487d68bdab6f20e2ab931c8283f63c94862cf31))
+
+## [1.29.0](https://www.github.com/googleapis/python-api-core/compare/v1.28.0...v1.29.0) (2021-06-02)
+
+
+### Features
+
+* HTTPIterator now accepts a page_size parameter to control page … ([#197](https://www.github.com/googleapis/python-api-core/issues/197)) ([a421913](https://www.github.com/googleapis/python-api-core/commit/a4219137a5bfcf2a6f44780ecdbf475c1129e461))
+
+
+### Documentation
+
+* fix broken links in multiprocessing.rst ([#195](https://www.github.com/googleapis/python-api-core/issues/195)) ([8d8bc51](https://www.github.com/googleapis/python-api-core/commit/8d8bc5150ee5543b4aeb2c271da034a5305d1436))
+
+## [1.28.0](https://www.github.com/googleapis/python-api-core/compare/v1.27.0...v1.28.0) (2021-05-20)
+
+
+### Bug Fixes
+
+* require google-auth>=1.25.0 ([#190](https://www.github.com/googleapis/python-api-core/issues/190)) ([155da5e](https://www.github.com/googleapis/python-api-core/commit/155da5e18cc2fdcfa57de6f956b7d078e79cd4b7))
+
+
+### Miscellaneous Chores
+
+* release 1.28.0 ([#192](https://www.github.com/googleapis/python-api-core/issues/192)) ([11b5da4](https://www.github.com/googleapis/python-api-core/commit/11b5da426a842541ca2b861d3387fc312b3f5b60))
+
+## [1.27.0](https://www.github.com/googleapis/python-api-core/compare/v1.26.3...v1.27.0) (2021-05-18)
+
+
+### Features
+
+* Add support for `rest/` token in `x-goog-api-client` header ([#189](https://www.github.com/googleapis/python-api-core/issues/189)) ([15aca6b](https://www.github.com/googleapis/python-api-core/commit/15aca6b288b2ec5ce0251e442e1dfa7f52e1b124))
+* retry google.auth TransportError and requests ConnectionError ([#178](https://www.github.com/googleapis/python-api-core/issues/178)) ([6ae04a8](https://www.github.com/googleapis/python-api-core/commit/6ae04a8d134fffe13f06081e15f9723c1b2ea334))
+
+## [1.26.3](https://www.github.com/googleapis/python-api-core/compare/v1.26.2...v1.26.3) (2021-03-25)
+
+
+### Bug Fixes
+
+* skip empty policy bindings in `len()` and `iter()` ([#159](https://www.github.com/googleapis/python-api-core/issues/159)) ([9eaa786](https://www.github.com/googleapis/python-api-core/commit/9eaa7868164a7e98792de24d2be97f79fba22322))
+
+
+### Documentation
+
+* update python contributing guide ([#147](https://www.github.com/googleapis/python-api-core/issues/147)) ([1d76b57](https://www.github.com/googleapis/python-api-core/commit/1d76b57d1f218f7885f85dc7c052bad1ad3857ac))
+
+## [1.26.2](https://www.github.com/googleapis/python-api-core/compare/v1.26.1...v1.26.2) (2021-03-23)
+
+
+### Bug Fixes
+
+* save empty IAM policy bindings ([#155](https://www.github.com/googleapis/python-api-core/issues/155)) ([536c2ca](https://www.github.com/googleapis/python-api-core/commit/536c2cad814b8fa8cd346a3d7bd5f6b9889c4a6f))
+
+## [1.26.1](https://www.github.com/googleapis/python-api-core/compare/v1.26.0...v1.26.1) (2021-02-12)
+
+
+### Bug Fixes
+
+* add operation name to x-goog-request-params in async client ([#137](https://www.github.com/googleapis/python-api-core/issues/137)) ([7271b23](https://www.github.com/googleapis/python-api-core/commit/7271b23afddb032e49e957525704d0cd5bfa4c65))
+
+## [1.26.0](https://www.github.com/googleapis/python-api-core/compare/v1.25.1...v1.26.0) (2021-02-08)
+
+
+### Features
+
+* allow default_host and default_scopes to be passed to create_channel ([#134](https://www.github.com/googleapis/python-api-core/issues/134)) ([94c76e0](https://www.github.com/googleapis/python-api-core/commit/94c76e0873e5b2f42331d5b1ad286c1e63b61395))
+
+## [1.25.1](https://www.github.com/googleapis/python-api-core/compare/v1.25.0...v1.25.1) (2021-01-25)
+
+
+### Bug Fixes
+
+* add operation name to x-goog-request-params ([#133](https://www.github.com/googleapis/python-api-core/issues/133)) ([97cef4a](https://www.github.com/googleapis/python-api-core/commit/97cef4ad1db55938715f9ac8000d1b0ad1e71873))
+
+
+### Documentation
+
+* fix spelling errors in retry ([#131](https://www.github.com/googleapis/python-api-core/issues/131)) ([232dab0](https://www.github.com/googleapis/python-api-core/commit/232dab0ad3ef2cca0edfe707d8f90ca0ea200ba2))
+
+## [1.25.0](https://www.github.com/googleapis/python-api-core/compare/v1.24.1...v1.25.0) (2021-01-14)
+
+
+### Features
+
+* allow gRPC metadata to be passed to operations client ([#127](https://www.github.com/googleapis/python-api-core/issues/127)) ([73854e8](https://www.github.com/googleapis/python-api-core/commit/73854e897b885e9be290f2676a8a1466b4f041e4))
+
+
+### Documentation
+
+* **python:** document adding Python 3.9 support, dropping 3.5 support ([#120](https://www.github.com/googleapis/python-api-core/issues/120)) ([b51b7f5](https://www.github.com/googleapis/python-api-core/commit/b51b7f587042fe9340371c1b5c8e9adf8001c43a)), closes [#787](https://www.github.com/googleapis/python-api-core/issues/787)
+
+## [1.24.1](https://www.github.com/googleapis/python-api-core/compare/v1.24.0...v1.24.1) (2020-12-16)
+
+
+### Bug Fixes
+
+* support 'retry' for ops built from HTTP/gRPC responses ([#115](https://www.github.com/googleapis/python-api-core/issues/115)) ([7a38243](https://www.github.com/googleapis/python-api-core/commit/7a38243c351b228d103eee81fc5ae521ad1c930e)), closes [#87](https://www.github.com/googleapis/python-api-core/issues/87)
+
+## [1.24.0](https://www.github.com/googleapis/python-api-core/compare/v1.23.0...v1.24.0) (2020-12-14)
+
+
+### Features
+
+* add support for Python 3.9, drop support for Python 3.5 ([#111](https://www.github.com/googleapis/python-api-core/issues/111)) ([fdbed0f](https://www.github.com/googleapis/python-api-core/commit/fdbed0f0cbae8de21c73338a6817f8aa79cef4c9)), closes [#110](https://www.github.com/googleapis/python-api-core/issues/110)
+
+
+### Documentation
+
+* explain how to create credentials from dict ([#109](https://www.github.com/googleapis/python-api-core/issues/109)) ([5dce6d6](https://www.github.com/googleapis/python-api-core/commit/5dce6d61e7324a415c1b3ceaeec1ce1b5f1ea189))
+
+## [1.23.0](https://www.github.com/googleapis/python-api-core/compare/v1.22.4...v1.23.0) (2020-10-16)
+
+
+### Features
+
+* **api-core:** pass retry from result() to done() ([#9](https://www.github.com/googleapis/python-api-core/issues/9)) ([6623b31](https://www.github.com/googleapis/python-api-core/commit/6623b31a2040b834be808d711fa397dc428f1837))
+
+
+### Bug Fixes
+
+* map LRO errors to library exception types ([#86](https://www.github.com/googleapis/python-api-core/issues/86)) ([a855339](https://www.github.com/googleapis/python-api-core/commit/a85533903c57be4809fe76435e298409e0903931)), closes [#15](https://www.github.com/googleapis/python-api-core/issues/15)
+* harden install to use full paths, and windows separators on windows ([#88](https://www.github.com/googleapis/python-api-core/issues/88)) ([db8e636](https://www.github.com/googleapis/python-api-core/commit/db8e636f545a8872f959e3f403cfec30ffed6c34))
+* update out-of-date comment in exceptions.py ([#93](https://www.github.com/googleapis/python-api-core/issues/93)) ([70ebe42](https://www.github.com/googleapis/python-api-core/commit/70ebe42601b3d088b3421233ef7d8245229b7265))
+
+## [1.22.4](https://www.github.com/googleapis/python-api-core/compare/v1.22.3...v1.22.4) (2020-10-05)
+
+
+### Bug Fixes
+
+* use version.py instead of pkg_resources.get_distribution ([#80](https://www.github.com/googleapis/python-api-core/issues/80)) ([d480d97](https://www.github.com/googleapis/python-api-core/commit/d480d97e41cd6705325b3b649360553a83c23f47))
+
+## [1.22.3](https://www.github.com/googleapis/python-api-core/compare/v1.22.2...v1.22.3) (2020-10-02)
+
+
+### Bug Fixes
+
+* **deps:** require six >= 1.13.0 ([#78](https://www.github.com/googleapis/python-api-core/issues/78)) ([a7a8b98](https://www.github.com/googleapis/python-api-core/commit/a7a8b98602a3eb277fdc607ac69f3bcb147f3351)), closes [/github.com/benjaminp/six/blob/c0be8815d13df45b6ae471c4c436cce8c192245d/CHANGES#L30-L31](https://www.github.com/googleapis//github.com/benjaminp/six/blob/c0be8815d13df45b6ae471c4c436cce8c192245d/CHANGES/issues/L30-L31)
+
+## [1.22.2](https://www.github.com/googleapis/python-api-core/compare/v1.22.1...v1.22.2) (2020-09-03)
+
+
+### Bug Fixes
+
+* only add quota project id if supported ([#75](https://www.github.com/googleapis/python-api-core/issues/75)) ([8f8ee78](https://www.github.com/googleapis/python-api-core/commit/8f8ee7879e4f834f3c676e535ffc41b5b9b2de62))
+
+## [1.22.1](https://www.github.com/googleapis/python-api-core/compare/v1.22.0...v1.22.1) (2020-08-12)
### Documentation
@@ -35,7 +670,7 @@
* allow credentials files to be passed for channel creation ([#50](https://www.github.com/googleapis/python-api-core/issues/50)) ([ded92d0](https://www.github.com/googleapis/python-api-core/commit/ded92d0acdcde4295d0e5df05fda0d83783a3991))
-### [1.20.1](https://www.github.com/googleapis/python-api-core/compare/v1.20.0...v1.20.1) (2020-06-16)
+## [1.20.1](https://www.github.com/googleapis/python-api-core/compare/v1.20.0...v1.20.1) (2020-06-16)
### Bug Fixes
@@ -49,7 +684,7 @@
* allow disabling response stream pre-fetch ([#30](https://www.github.com/googleapis/python-api-core/issues/30)) ([74e0b0f](https://www.github.com/googleapis/python-api-core/commit/74e0b0f8387207933c120af15b2bb5d175dd8f84)), closes [#25](https://www.github.com/googleapis/python-api-core/issues/25)
-### [1.19.1](https://www.github.com/googleapis/python-api-core/compare/v1.19.0...v1.19.1) (2020-06-06)
+## [1.19.1](https://www.github.com/googleapis/python-api-core/compare/v1.19.0...v1.19.1) (2020-06-06)
### Bug Fixes
diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md
index b3d1f602..039f4368 100644
--- a/CODE_OF_CONDUCT.md
+++ b/CODE_OF_CONDUCT.md
@@ -1,44 +1,95 @@
-# Contributor Code of Conduct
+# Code of Conduct
-As contributors and maintainers of this project,
-and in the interest of fostering an open and welcoming community,
-we pledge to respect all people who contribute through reporting issues,
-posting feature requests, updating documentation,
-submitting pull requests or patches, and other activities.
+## Our Pledge
-We are committed to making participation in this project
-a harassment-free experience for everyone,
-regardless of level of experience, gender, gender identity and expression,
-sexual orientation, disability, personal appearance,
-body size, race, ethnicity, age, religion, or nationality.
+In the interest of fostering an open and welcoming environment, we as
+contributors and maintainers pledge to making participation in our project and
+our community a harassment-free experience for everyone, regardless of age, body
+size, disability, ethnicity, gender identity and expression, level of
+experience, education, socio-economic status, nationality, personal appearance,
+race, religion, or sexual identity and orientation.
+
+## Our Standards
+
+Examples of behavior that contributes to creating a positive environment
+include:
+
+* Using welcoming and inclusive language
+* Being respectful of differing viewpoints and experiences
+* Gracefully accepting constructive criticism
+* Focusing on what is best for the community
+* Showing empathy towards other community members
Examples of unacceptable behavior by participants include:
-* The use of sexualized language or imagery
-* Personal attacks
-* Trolling or insulting/derogatory comments
-* Public or private harassment
-* Publishing other's private information,
-such as physical or electronic
-addresses, without explicit permission
-* Other unethical or unprofessional conduct.
+* The use of sexualized language or imagery and unwelcome sexual attention or
+ advances
+* Trolling, insulting/derogatory comments, and personal or political attacks
+* Public or private harassment
+* Publishing others' private information, such as a physical or electronic
+ address, without explicit permission
+* Other conduct which could reasonably be considered inappropriate in a
+ professional setting
+
+## Our Responsibilities
+
+Project maintainers are responsible for clarifying the standards of acceptable
+behavior and are expected to take appropriate and fair corrective action in
+response to any instances of unacceptable behavior.
Project maintainers have the right and responsibility to remove, edit, or reject
-comments, commits, code, wiki edits, issues, and other contributions
-that are not aligned to this Code of Conduct.
-By adopting this Code of Conduct,
-project maintainers commit themselves to fairly and consistently
-applying these principles to every aspect of managing this project.
-Project maintainers who do not follow or enforce the Code of Conduct
-may be permanently removed from the project team.
-
-This code of conduct applies both within project spaces and in public spaces
-when an individual is representing the project or its community.
-
-Instances of abusive, harassing, or otherwise unacceptable behavior
-may be reported by opening an issue
-or contacting one or more of the project maintainers.
-
-This Code of Conduct is adapted from the [Contributor Covenant](http://contributor-covenant.org), version 1.2.0,
-available at [http://contributor-covenant.org/version/1/2/0/](http://contributor-covenant.org/version/1/2/0/)
+comments, commits, code, wiki edits, issues, and other contributions that are
+not aligned to this Code of Conduct, or to ban temporarily or permanently any
+contributor for other behaviors that they deem inappropriate, threatening,
+offensive, or harmful.
+
+## Scope
+
+This Code of Conduct applies both within project spaces and in public spaces
+when an individual is representing the project or its community. Examples of
+representing a project or community include using an official project e-mail
+address, posting via an official social media account, or acting as an appointed
+representative at an online or offline event. Representation of a project may be
+further defined and clarified by project maintainers.
+
+This Code of Conduct also applies outside the project spaces when the Project
+Steward has a reasonable belief that an individual's behavior may have a
+negative impact on the project or its community.
+
+## Conflict Resolution
+
+We do not believe that all conflict is bad; healthy debate and disagreement
+often yield positive results. However, it is never okay to be disrespectful or
+to engage in behavior that violates the project’s code of conduct.
+
+If you see someone violating the code of conduct, you are encouraged to address
+the behavior directly with those involved. Many issues can be resolved quickly
+and easily, and this gives people more control over the outcome of their
+dispute. If you are unable to resolve the matter for any reason, or if the
+behavior is threatening or harassing, report it. We are dedicated to providing
+an environment where participants feel welcome and safe.
+
+Reports should be directed to *googleapis-stewards@google.com*, the
+Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to
+receive and address reported violations of the code of conduct. They will then
+work with a committee consisting of representatives from the Open Source
+Programs Office and the Google Open Source Strategy team. If for any reason you
+are uncomfortable reaching out to the Project Steward, please email
+opensource@google.com.
+
+We will investigate every complaint, but you may not receive a direct response.
+We will use our discretion in determining when and how to follow up on reported
+incidents, which may range from not taking action to permanent expulsion from
+the project and project-sponsored spaces. We will notify the accused of the
+report and provide them an opportunity to discuss it before any action is taken.
+The identity of the reporter will be omitted from the details of the report
+supplied to the accused. In potentially harmful situations, such as ongoing
+harassment or threats to anyone's safety, we may take action without notice.
+
+## Attribution
+
+This Code of Conduct is adapted from the Contributor Covenant, version 1.4,
+available at
+https://www.contributor-covenant.org/version/1/4/code-of-conduct.html
\ No newline at end of file
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index 19a83545..1a1f608b 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -1,4 +1,3 @@
-.. Generated by synthtool. DO NOT EDIT!
############
Contributing
############
@@ -21,8 +20,8 @@ In order to add a feature:
- The feature must be documented in both the API and narrative
documentation.
-- The feature must work fully on the following CPython versions: 2.7,
- 3.5, 3.6, 3.7 and 3.8 on both UNIX and Windows.
+- The feature must work fully on the following CPython versions:
+ 3.7, 3.8, 3.9, 3.10, 3.11, 3.12 and 3.13 on both UNIX and Windows.
- The feature must not add unnecessary dependencies (where
"unnecessary" is of course subjective, but new dependencies should
@@ -50,9 +49,9 @@ You'll have to create a development environment using a Git checkout:
# Configure remotes such that you can pull changes from the googleapis/python-api-core
# repository into your local repository.
$ git remote add upstream git@github.com:googleapis/python-api-core.git
- # fetch and merge changes from upstream into master
+ # fetch and merge changes from upstream into main
$ git fetch upstream
- $ git merge upstream/master
+ $ git merge upstream/main
Now your local repo is set up such that you will push changes to your GitHub
repo, from which you can submit a pull request.
@@ -68,36 +67,19 @@ Using ``nox``
We use `nox <https://pypi.org/project/nox/>`__ to instrument our tests.
- To test your changes, run unit tests with ``nox``::
+ $ nox -s unit
- $ nox -s unit-2.7
- $ nox -s unit-3.7
- $ ...
+- To run a single unit test::
- .. note::
-
- The unit tests and system tests are described in the
- ``noxfile.py`` files in each directory.
-
-.. nox: https://pypi.org/project/nox/
+   $ nox -s unit-3.13 -- -k <name of test>
-Note on Editable Installs / Develop Mode
-========================================
-- As mentioned previously, using ``setuptools`` in `develop mode`_
- or a ``pip`` `editable install`_ is not possible with this
- library. This is because this library uses `namespace packages`_.
- For context see `Issue #2316`_ and the relevant `PyPA issue`_.
+ .. note::
- Since ``editable`` / ``develop`` mode can't be used, packages
- need to be installed directly. Hence your changes to the source
- tree don't get incorporated into the **already installed**
- package.
+     The unit tests are described in the ``noxfile.py`` files
+ in each directory.
-.. _namespace packages: https://www.python.org/dev/peps/pep-0420/
-.. _Issue #2316: https://github.com/GoogleCloudPlatform/google-cloud-python/issues/2316
-.. _PyPA issue: https://github.com/pypa/packaging-problems/issues/12
-.. _develop mode: https://setuptools.readthedocs.io/en/latest/setuptools.html#development-mode
-.. _editable install: https://pip.pypa.io/en/stable/reference/pip_install/#editable-installs
+.. nox: https://pypi.org/project/nox/
*****************************************
I'm getting weird errors... Can you help?
@@ -112,8 +94,12 @@ On Debian/Ubuntu::
************
Coding Style
************
+- We use the automatic code formatter ``black``. You can run it using
+ the nox session ``blacken``. This will eliminate many lint errors. Run via::
-- PEP8 compliance, with exceptions defined in the linter configuration.
+ $ nox -s blacken
+
+- PEP8 compliance is required, with exceptions defined in the linter configuration.
If you have ``nox`` installed, you can test that you have not introduced
any non-compliant code via::
@@ -123,12 +109,22 @@ Coding Style
variables::
export GOOGLE_CLOUD_TESTING_REMOTE="upstream"
- export GOOGLE_CLOUD_TESTING_BRANCH="master"
+ export GOOGLE_CLOUD_TESTING_BRANCH="main"
By doing this, you are specifying the location of the most up-to-date
version of ``python-api-core``. The suggested remote name ``upstream``
should point to the official ``googleapis`` checkout and
- the branch should be the main branch on that remote (``master``).
+ the branch should be the main branch on that remote (``main``).
+
+- This repository contains configuration for the
+  `pre-commit <https://pre-commit.com/>`__ tool, which automates checking
+ our linters during a commit. If you have it installed on your ``$PATH``,
+  you can enable automatic enforcement of those checks via:
+
+.. code-block:: bash
+
+ $ pre-commit install
+ pre-commit installed at .git/hooks/pre-commit
Exceptions to PEP8:
@@ -136,40 +132,6 @@ Exceptions to PEP8:
"Function-Under-Test"), which is PEP8-incompliant, but more readable.
Some also use a local variable, ``MUT`` (short for "Module-Under-Test").
-********************
-Running System Tests
-********************
-
-- To run system tests, you can execute::
-
- $ nox -s system-3.7
- $ nox -s system-2.7
-
- .. note::
-
- System tests are only configured to run under Python 2.7 and
- Python 3.7. For expediency, we do not run them in older versions
- of Python 3.
-
- This alone will not run the tests. You'll need to change some local
- auth settings and change some configuration in your project to
- run all the tests.
-
-- System tests will be run against an actual project and
- so you'll need to provide some environment variables to facilitate
- authentication to your project:
-
- - ``GOOGLE_APPLICATION_CREDENTIALS``: The path to a JSON key file;
- Such a file can be downloaded directly from the developer's console by clicking
- "Generate new JSON key". See private key
- `docs `__
- for more details.
-
-- Once you have downloaded your json keys, set the environment variable
- ``GOOGLE_APPLICATION_CREDENTIALS`` to the absolute path of the json file::
-
- $ export GOOGLE_APPLICATION_CREDENTIALS="/Users//path/to/app_credentials.json"
-
*************
Test Coverage
@@ -191,6 +153,30 @@ Build the docs via:
$ nox -s docs
+*************************
+Samples and code snippets
+*************************
+
+Code samples and snippets live in the `samples/` directory. Feel free to
+provide more examples, but make sure to write tests for those examples.
+Each folder containing example code requires its own `noxfile.py` script
+which automates testing. If you decide to create a new folder, you can
+base it on the `samples/snippets` folder (providing `noxfile.py` and
+the requirements files).
+
+The tests will run against a real Google Cloud Project, so you should
+configure them just like the System Tests.
+
+- To run sample tests, you can execute::
+
+ # Run all tests in a folder
+ $ cd samples/snippets
+ $ nox -s py-3.8
+
+ # Run a single sample test
+ $ cd samples/snippets
+   $ nox -s py-3.8 -- -k <name of test>
+
********************************************
Note About ``README`` as it pertains to PyPI
********************************************
@@ -199,7 +185,7 @@ The `description on PyPI`_ for the project comes directly from the
``README``. Due to the reStructuredText (``rst``) parser used by
PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst``
instead of
-``https://github.com/googleapis/python-api-core/blob/master/CONTRIBUTING.rst``)
+``https://github.com/googleapis/python-api-core/blob/main/CONTRIBUTING.rst``)
may cause problems creating links or rendering the description.
.. _description on PyPI: https://pypi.org/project/google-api-core
@@ -211,34 +197,27 @@ Supported Python Versions
We support:
-- `Python 3.5`_
-- `Python 3.6`_
- `Python 3.7`_
- `Python 3.8`_
+- `Python 3.9`_
+- `Python 3.10`_
+- `Python 3.11`_
+- `Python 3.12`_
+- `Python 3.13`_
-.. _Python 3.5: https://docs.python.org/3.5/
-.. _Python 3.6: https://docs.python.org/3.6/
.. _Python 3.7: https://docs.python.org/3.7/
.. _Python 3.8: https://docs.python.org/3.8/
+.. _Python 3.9: https://docs.python.org/3.9/
+.. _Python 3.10: https://docs.python.org/3.10/
+.. _Python 3.11: https://docs.python.org/3.11/
+.. _Python 3.12: https://docs.python.org/3.12/
+.. _Python 3.13: https://docs.python.org/3.13/
Supported versions can be found in our ``noxfile.py`` `config`_.
-.. _config: https://github.com/googleapis/python-api-core/blob/master/noxfile.py
-
-Python 2.7 support is deprecated. All code changes should maintain Python 2.7 compatibility until January 1, 2020.
-
-We also explicitly decided to support Python 3 beginning with version
-3.5. Reasons for this include:
-
-- Encouraging use of newest versions of Python 3
-- Taking the lead of `prominent`_ open-source `projects`_
-- `Unicode literal support`_ which allows for a cleaner codebase that
- works in both Python 2 and Python 3
+.. _config: https://github.com/googleapis/python-api-core/blob/main/noxfile.py
-.. _prominent: https://docs.djangoproject.com/en/1.9/faq/install/#what-python-version-can-i-use-with-django
-.. _projects: http://flask.pocoo.org/docs/0.10/python3/
-.. _Unicode literal support: https://www.python.org/dev/peps/pep-0414/
**********
Versioning
diff --git a/LICENSE b/LICENSE
index a8ee855d..d6456956 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,6 +1,7 @@
- Apache License
+
+ Apache License
Version 2.0, January 2004
- https://www.apache.org/licenses/
+ http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
@@ -192,7 +193,7 @@
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
- https://www.apache.org/licenses/LICENSE-2.0
+ http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
diff --git a/MANIFEST.in b/MANIFEST.in
index e9e29d12..d6814cd6 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
#
-# Copyright 2020 Google LLC
+# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -16,10 +16,10 @@
# Generated by synthtool. DO NOT EDIT!
include README.rst LICENSE
-recursive-include google *.json *.proto
+recursive-include google *.json *.proto py.typed
recursive-include tests *
global-exclude *.py[co]
global-exclude __pycache__
# Exclude scripts for samples readmegen
-prune scripts/readme-gen
\ No newline at end of file
+prune scripts/readme-gen
diff --git a/README.rst b/README.rst
index 244043ea..58ae26cb 100644
--- a/README.rst
+++ b/README.rst
@@ -1,7 +1,7 @@
Core Library for Google Client Libraries
========================================
-|pypi| |versions|
+|pypi| |versions|
This library is not meant to stand alone. Instead, it defines
common helpers used by all Google API clients. For more information, see the
@@ -16,8 +16,16 @@ common helpers used by all Google API clients. For more information, see the
Supported Python Versions
-------------------------
-Python >= 3.5
+Python >= 3.7
-Deprecated Python Versions
---------------------------
-Python == 2.7. Python 2.7 support will be removed on January 1, 2020.
+
+Unsupported Python Versions
+---------------------------
+
+Python == 2.7, Python == 3.5, Python == 3.6.
+
+The last version of this library compatible with Python 2.7 and 3.5 is
+`google-api-core==1.31.1`.
+
+The last version of this library compatible with Python 3.6 is
+`google-api-core==2.8.2`.
diff --git a/SECURITY.md b/SECURITY.md
new file mode 100644
index 00000000..8b58ae9c
--- /dev/null
+++ b/SECURITY.md
@@ -0,0 +1,7 @@
+# Security Policy
+
+To report a security issue, please use [g.co/vulnz](https://g.co/vulnz).
+
+The Google Security Team will respond within 5 working days of your report on g.co/vulnz.
+
+We use g.co/vulnz for intake, and coordinate disclosure here on GitHub using a Security Advisory to privately discuss and fix the issue.
diff --git a/docs/_static/custom.css b/docs/_static/custom.css
index 0abaf229..b0a29546 100644
--- a/docs/_static/custom.css
+++ b/docs/_static/custom.css
@@ -1,4 +1,20 @@
div#python2-eol {
border-color: red;
border-width: medium;
-}
\ No newline at end of file
+}
+
+/* Ensure minimum width for 'Parameters' / 'Returns' column */
+dl.field-list > dt {
+ min-width: 100px
+}
+
+/* Insert space between methods for readability */
+dl.method {
+ padding-top: 10px;
+ padding-bottom: 10px
+}
+
+/* Insert empty space between classes */
+dl.class {
+ padding-bottom: 50px
+}
diff --git a/docs/auth.rst b/docs/auth.rst
index cec7c16d..3dcc5fd3 100644
--- a/docs/auth.rst
+++ b/docs/auth.rst
@@ -6,7 +6,11 @@ Authentication
Overview
========
-* **If you're running in Compute Engine or App Engine**,
+For a language agnostic overview of authentication on Google Cloud, see `Authentication Overview`_.
+
+.. _Authentication Overview: https://cloud.google.com/docs/authentication
+
+* **If you're running in a Google Virtual Machine Environment (Compute Engine, App Engine, Cloud Run, Cloud Functions)**,
authentication should "just work".
* **If you're developing locally**,
@@ -41,7 +45,7 @@ Overview
$ export GOOGLE_APPLICATION_CREDENTIALS="/path/to/keyfile.json"
-.. _service account: https://cloud.google.com/storage/docs/authentication#generating-a-private-key
+.. _service account: https://cloud.google.com/iam/docs/creating-managing-service-accounts#creating
Client-Provided Authentication
==============================
@@ -97,27 +101,17 @@ After creation, you can pass it directly to a :class:`Client `
-just for Google App Engine:
-
-.. code:: python
-
- from google.auth import app_engine
- credentials = app_engine.Credentials()
+.. _google-auth-guide: https://googleapis.dev/python/google-auth/latest/user-guide.html#service-account-private-key-files
Google Compute Engine Environment
---------------------------------
+These credentials are used in Google Virtual Machine Environments.
+This includes most App Engine runtimes, Compute Engine, Cloud
+Functions, and Cloud Run.
+
To create
-:class:`credentials <google.auth.compute_engine.credentials.Credentials>`
-just for Google Compute Engine:
+:class:`credentials <google.auth.compute_engine.credentials.Credentials>`:
.. code:: python
@@ -129,16 +123,24 @@ Service Accounts
A `service account`_ is stored in a JSON keyfile.
-The
-:meth:`from_service_account_json() `
-factory can be used to create a :class:`Client ` with
-service account credentials.
+.. code:: python
+
+ from google.oauth2 import service_account
-For example, with a JSON keyfile:
+ credentials = service_account.Credentials.from_service_account_file(
+ '/path/to/key.json')
+
+A JSON string or dictionary:
.. code:: python
- client = Client.from_service_account_json('/path/to/keyfile.json')
+ import json
+
+ from google.oauth2 import service_account
+
+ json_account_info = json.loads(...) # convert JSON to dictionary
+ credentials = service_account.Credentials.from_service_account_info(
+ json_account_info)
.. tip::
@@ -160,10 +162,10 @@ possible to call Google Cloud APIs with a user account via
A production application should **use a service account**,
but you may wish to use your own personal user account when first
- getting started with the ``google-cloud-python`` library.
+ getting started with the ``google-cloud-*`` library.
The simplest way to use credentials from a user account is via
-Application Default Credentials using ``gcloud auth login``
+Application Default Credentials using ``gcloud auth application-default login``
(as mentioned above) and :func:`google.auth.default`:
.. code:: python
@@ -183,67 +185,10 @@ Troubleshooting
Setting up a Service Account
----------------------------
-If your application is not running on Google Compute Engine,
-you need a `Google Developers Service Account`_.
-
-#. Visit the `Google Developers Console`_.
-
-#. Create a new project or click on an existing project.
-
-#. Navigate to **APIs & auth** > **APIs** and enable the APIs
- that your application requires.
-
- .. raw:: html
-
-
-
- .. note::
-
- You may need to enable billing in order to use these services.
-
- * **BigQuery**
-
- * BigQuery API
+If your application is not running on a Google Virtual Machine Environment,
+you need a Service Account. See `Creating a Service Account`_.
- * **Datastore**
-
- * Google Cloud Datastore API
-
- * **Pub/Sub**
-
- * Google Cloud Pub/Sub
-
- * **Storage**
-
- * Google Cloud Storage
- * Google Cloud Storage JSON API
-
-#. Navigate to **APIs & auth** > **Credentials**.
-
- You should see a screen like one of the following:
-
- .. raw:: html
-
-
-
- .. raw:: html
-
-
-
- Find the "Add credentials" drop down and select "Service account" to be
- guided through downloading a new JSON keyfile.
-
- If you want to re-use an existing service account,
- you can easily generate a new keyfile.
- Just select the account you wish to re-use,
- and click **Generate new JSON key**:
-
- .. raw:: html
-
-
-
-.. _Google Developers Console: https://console.developers.google.com/project
-.. _Google Developers Service Account: https://developers.google.com/accounts/docs/OAuth2ServiceAccount
+.. _Creating a Service Account: https://cloud.google.com/iam/docs/creating-managing-service-accounts#creating
Using Google Compute Engine
---------------------------
@@ -262,24 +207,7 @@ you add the correct scopes for the APIs you want to access:
* ``https://www.googleapis.com/auth/cloud-platform``
* ``https://www.googleapis.com/auth/cloud-platform.read-only``
-* **BigQuery**
-
- * ``https://www.googleapis.com/auth/bigquery``
- * ``https://www.googleapis.com/auth/bigquery.insertdata``
-
-* **Datastore**
-
- * ``https://www.googleapis.com/auth/datastore``
- * ``https://www.googleapis.com/auth/userinfo.email``
-
-* **Pub/Sub**
-
- * ``https://www.googleapis.com/auth/pubsub``
-
-* **Storage**
-
- * ``https://www.googleapis.com/auth/devstorage.full_control``
- * ``https://www.googleapis.com/auth/devstorage.read_only``
- * ``https://www.googleapis.com/auth/devstorage.read_write``
+For scopes for specific APIs, see `OAuth 2.0 Scopes for Google APIs`_.
.. _set up the GCE instance: https://cloud.google.com/compute/docs/authentication#using
+.. _OAuth 2.0 Scopes for Google APIs: https://developers.google.com/identity/protocols/oauth2/scopes
diff --git a/docs/conf.py b/docs/conf.py
index a53c37db..ad4723c0 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -1,4 +1,17 @@
# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
#
# google-api-core documentation build configuration file
#
@@ -20,12 +33,16 @@
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath(".."))
+# For plugins that cannot read conf.py.
+# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85
+sys.path.insert(0, os.path.abspath("."))
+
__version__ = ""
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
-needs_sphinx = "1.6.3"
+needs_sphinx = "1.5.5"
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
@@ -35,6 +52,7 @@
"sphinx.ext.autosummary",
"sphinx.ext.intersphinx",
"sphinx.ext.coverage",
+ "sphinx.ext.doctest",
"sphinx.ext.napoleon",
"sphinx.ext.todo",
"sphinx.ext.viewcode",
@@ -58,13 +76,13 @@
# The encoding of source files.
# source_encoding = 'utf-8-sig'
-# The master toctree document.
-master_doc = "index"
+# The root toctree document.
+root_doc = "index"
# General information about the project.
-project = u"google-api-core"
-copyright = u"2019, Google"
-author = u"Google APIs"
+project = "google-api-core"
+copyright = "2019, Google"
+author = "Google APIs"
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
@@ -90,7 +108,13 @@
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
-exclude_patterns = ["_build"]
+exclude_patterns = [
+ "_build",
+ "**/.nox/**/*",
+ "samples/AUTHORING_GUIDE.md",
+ "samples/CONTRIBUTING.md",
+ "samples/snippets/README.rst",
+]
# The reST default role (used for this markup: `text`) to use for all
# documents.
@@ -256,9 +280,9 @@
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(
- master_doc,
+ root_doc,
"google-api-core.tex",
- u"google-api-core Documentation",
+ "google-api-core Documentation",
author,
"manual",
)
@@ -291,9 +315,9 @@
# (source start file, name, description, authors, manual section).
man_pages = [
(
- master_doc,
+ root_doc,
"google-api-core",
- u"google-api-core Documentation",
+ "google-api-core Documentation",
[author],
1,
)
@@ -310,9 +334,9 @@
# dir menu entry, description, category)
texinfo_documents = [
(
- master_doc,
+ root_doc,
"google-api-core",
- u"google-api-core Documentation",
+ "google-api-core Documentation",
author,
"google-api-core",
"google-api-core Library",
@@ -335,14 +359,15 @@
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {
- "python": ("http://python.readthedocs.org/en/latest/", None),
- "google-auth": ("https://google-auth.readthedocs.io/en/stable", None),
+ "python": ("https://python.readthedocs.org/en/latest/", None),
+ "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None),
"google.api_core": (
"https://googleapis.dev/python/google-api-core/latest/",
None,
),
- "grpc": ("https://grpc.io/grpc/python/", None),
-
+ "grpc": ("https://grpc.github.io/grpc/python/", None),
+ "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None),
+ "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None),
}
diff --git a/docs/multiprocessing.rst b/docs/multiprocessing.rst
index 1cb29d4c..536d17b2 100644
--- a/docs/multiprocessing.rst
+++ b/docs/multiprocessing.rst
@@ -1,7 +1,7 @@
.. note::
- Because this client uses :mod:`grpcio` library, it is safe to
+   Because this client uses the :mod:`grpc` library, it is safe to
share instances across threads. In multiprocessing scenarios, the best
practice is to create client instances *after* the invocation of
- :func:`os.fork` by :class:`multiprocessing.Pool` or
+ :func:`os.fork` by :class:`multiprocessing.pool.Pool` or
:class:`multiprocessing.Process`.
diff --git a/docs/retry.rst b/docs/retry.rst
index 97a7f2ca..6e165f56 100644
--- a/docs/retry.rst
+++ b/docs/retry.rst
@@ -10,4 +10,5 @@ Retry in AsyncIO
.. automodule:: google.api_core.retry_async
:members:
+ :noindex:
:show-inheritance:
diff --git a/google/api_core/__init__.py b/google/api_core/__init__.py
index c762e183..b80ea372 100644
--- a/google/api_core/__init__.py
+++ b/google/api_core/__init__.py
@@ -14,10 +14,9 @@
"""Google API Core.
-This package contains common code and utilties used by Google client libraries.
+This package contains common code and utilities used by Google client libraries.
"""
-from pkg_resources import get_distribution
+from google.api_core import version as api_core_version
-
-__version__ = get_distribution("google-api-core").version
+__version__ = api_core_version.__version__
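
As a quick illustration of the change above, the version is now a plain module attribute rather than a ``pkg_resources`` lookup. A minimal sketch, assuming ``google-api-core`` is installed:

    # Both names resolve to the same static string; no pkg_resources needed.
    from google.api_core import __version__, version

    assert __version__ == version.__version__
    print(__version__)  # e.g. "2.x.y"
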
diff --git a/google/api_core/_rest_streaming_base.py b/google/api_core/_rest_streaming_base.py
new file mode 100644
index 00000000..3bc87a96
--- /dev/null
+++ b/google/api_core/_rest_streaming_base.py
@@ -0,0 +1,118 @@
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helpers for server-side streaming in REST."""
+
+from collections import deque
+import string
+from typing import Deque, Union
+import types
+
+import proto
+import google.protobuf.message
+from google.protobuf.json_format import Parse
+
+
+class BaseResponseIterator:
+ """Base Iterator over REST API responses. This class should not be used directly.
+
+ Args:
+ response_message_cls (Union[proto.Message, google.protobuf.message.Message]): A response
+ class expected to be returned from an API.
+
+ Raises:
+ ValueError: If `response_message_cls` is not a subclass of `proto.Message` or `google.protobuf.message.Message`.
+ """
+
+ def __init__(
+ self,
+ response_message_cls: Union[proto.Message, google.protobuf.message.Message],
+ ):
+ self._response_message_cls = response_message_cls
+ # Contains a list of JSON responses ready to be sent to user.
+ self._ready_objs: Deque[str] = deque()
+ # Current JSON response being built.
+ self._obj = ""
+ # Keeps track of the nesting level within a JSON object.
+ self._level = 0
+ # Keeps track whether HTTP response is currently sending values
+ # inside of a string value.
+ self._in_string = False
+ # Whether an escape symbol "\" was encountered.
+ self._escape_next = False
+
+ self._grab = types.MethodType(self._create_grab(), self)
+
+ def _process_chunk(self, chunk: str):
+ if self._level == 0:
+ if chunk[0] != "[":
+ raise ValueError(
+ "Can only parse array of JSON objects, instead got %s" % chunk
+ )
+ for char in chunk:
+ if char == "{":
+ if self._level == 1:
+ # Level 1 corresponds to the outermost JSON object
+ # (i.e. the one we care about).
+ self._obj = ""
+ if not self._in_string:
+ self._level += 1
+ self._obj += char
+ elif char == "}":
+ self._obj += char
+ if not self._in_string:
+ self._level -= 1
+ if not self._in_string and self._level == 1:
+ self._ready_objs.append(self._obj)
+ elif char == '"':
+                # Helps to deal with escaped quotes inside a string.
+ if not self._escape_next:
+ self._in_string = not self._in_string
+ self._obj += char
+ elif char in string.whitespace:
+ if self._in_string:
+ self._obj += char
+ elif char == "[":
+ if self._level == 0:
+ self._level += 1
+ else:
+ self._obj += char
+ elif char == "]":
+ if self._level == 1:
+ self._level -= 1
+ else:
+ self._obj += char
+ else:
+ self._obj += char
+ self._escape_next = not self._escape_next if char == "\\" else False
+
+ def _create_grab(self):
+ if issubclass(self._response_message_cls, proto.Message):
+
+ def grab(this):
+ return this._response_message_cls.from_json(
+ this._ready_objs.popleft(), ignore_unknown_fields=True
+ )
+
+ return grab
+ elif issubclass(self._response_message_cls, google.protobuf.message.Message):
+
+ def grab(this):
+ return Parse(this._ready_objs.popleft(), this._response_message_cls())
+
+ return grab
+ else:
+ raise ValueError(
+ "Response message class must be a subclass of proto.Message or google.protobuf.message.Message."
+ )
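
A runnable sketch of how the parser above behaves, using ``google.protobuf.struct_pb2.Struct`` as a stand-in response class; the chunk boundaries are arbitrary, as they would be on the wire, and ``_process_chunk`` / ``_grab`` are internal helpers shown here for illustration only:

    from google.protobuf import struct_pb2

    from google.api_core._rest_streaming_base import BaseResponseIterator

    it = BaseResponseIterator(response_message_cls=struct_pb2.Struct)

    # Feed arbitrarily split chunks of one JSON array of objects.
    for chunk in ['[{"a": 1}, ', '{"b": "x"}]']:
        it._process_chunk(chunk)

    # Two complete objects were accumulated; each parses into a Struct.
    while it._ready_objs:
        print(it._grab())
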
diff --git a/google/api_core/bidi.py b/google/api_core/bidi.py
index be52d97d..b002b409 100644
--- a/google/api_core/bidi.py
+++ b/google/api_core/bidi.py
@@ -17,11 +17,10 @@
import collections
import datetime
import logging
+import queue as queue_module
import threading
import time
-from six.moves import queue
-
from google.api_core import exceptions
_LOGGER = logging.getLogger(__name__)
@@ -71,7 +70,7 @@ class _RequestQueueGenerator(object):
CPU consumed by spinning is pretty minuscule.
Args:
- queue (queue.Queue): The request queue.
+ queue (queue_module.Queue): The request queue.
period (float): The number of seconds to wait for items from the queue
before checking if the RPC is cancelled. In practice, this
determines the maximum amount of time the request consumption
@@ -92,11 +91,9 @@ def __init__(self, queue, period=1, initial_request=None):
def _is_active(self):
# Note: there is a possibility that this starts *before* the call
# property is set. So we have to check if self.call is set before
- # seeing if it's active.
- if self.call is not None and not self.call.is_active():
- return False
- else:
- return True
+ # seeing if it's active. We need to return True if self.call is None.
+ # See https://github.com/googleapis/python-api-core/issues/560.
+ return self.call is None or self.call.is_active()
def __iter__(self):
if self._initial_request is not None:
@@ -108,7 +105,7 @@ def __iter__(self):
while True:
try:
item = self._queue.get(timeout=self._period)
- except queue.Empty:
+ except queue_module.Empty:
if not self._is_active():
_LOGGER.debug(
"Empty queue and inactive call, exiting request " "generator."
@@ -247,7 +244,7 @@ def __init__(self, start_rpc, initial_request=None, metadata=None):
self._start_rpc = start_rpc
self._initial_request = initial_request
self._rpc_metadata = metadata
- self._request_queue = queue.Queue()
+ self._request_queue = queue_module.Queue()
self._request_generator = None
self._is_active = False
self._callbacks = []
@@ -266,6 +263,10 @@ def add_done_callback(self, callback):
self._callbacks.append(callback)
def _on_call_done(self, future):
+ # This occurs when the RPC errors or is successfully terminated.
+ # Note that grpc's "future" here can also be a grpc.RpcError.
+ # See note in https://github.com/grpc/grpc/issues/10885#issuecomment-302651331
+        # that `grpc.RpcError` is also `grpc.Call`.
for callback in self._callbacks:
callback(future)
@@ -277,7 +278,13 @@ def open(self):
request_generator = _RequestQueueGenerator(
self._request_queue, initial_request=self._initial_request
)
- call = self._start_rpc(iter(request_generator), metadata=self._rpc_metadata)
+ try:
+ call = self._start_rpc(iter(request_generator), metadata=self._rpc_metadata)
+ except exceptions.GoogleAPICallError as exc:
+ # The original `grpc.RpcError` (which is usually also a `grpc.Call`) is
+ # available from the ``response`` property on the mapped exception.
+ self._on_call_done(exc.response)
+ raise
request_generator.call = call
@@ -299,6 +306,8 @@ def close(self):
self._request_queue.put(None)
self.call.cancel()
self._request_generator = None
+ self._initial_request = None
+ self._callbacks = []
# Don't set self.call to None. Keep it around so that send/recv can
# raise the error.
@@ -365,7 +374,7 @@ class ResumableBidiRpc(BidiRpc):
def should_recover(exc):
return (
isinstance(exc, grpc.RpcError) and
- exc.code() == grpc.StatusCode.UNVAILABLE)
+ exc.code() == grpc.StatusCode.UNAVAILABLE)
initial_request = example_pb2.StreamingRpcRequest(
setting='example')
@@ -590,7 +599,7 @@ class BackgroundConsumer(object):
def should_recover(exc):
return (
isinstance(exc, grpc.RpcError) and
- exc.code() == grpc.StatusCode.UNVAILABLE)
+ exc.code() == grpc.StatusCode.UNAVAILABLE)
initial_request = example_pb2.StreamingRpcRequest(
setting='example')
@@ -645,6 +654,7 @@ def _thread_main(self, ready):
# Keeping the lock throughout avoids that.
# In the future, we could use `Condition.wait_for` if we drop
# Python 2.7.
+ # See: https://github.com/googleapis/python-api-core/issues/211
with self._wake:
while self._paused:
_LOGGER.debug("paused, waiting for waking.")
@@ -654,7 +664,8 @@ def _thread_main(self, ready):
_LOGGER.debug("waiting for recv.")
response = self._bidi_rpc.recv()
_LOGGER.debug("recved response.")
- self._on_response(response)
+ if self._on_response is not None:
+ self._on_response(response)
except exceptions.GoogleAPICallError as exc:
_LOGGER.debug(
@@ -709,6 +720,7 @@ def stop(self):
_LOGGER.warning("Background thread did not exit.")
self._thread = None
+ self._on_response = None
@property
def is_active(self):
@@ -727,7 +739,7 @@ def resume(self):
"""Resumes the response stream."""
with self._wake:
self._paused = False
- self._wake.notifyAll()
+ self._wake.notify_all()
@property
def is_paused(self):
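
The ``_is_active`` rewrite above keeps the request generator alive while no call is attached yet (``self.call is None``). A runnable sketch of that behavior using the module's internal ``_RequestQueueGenerator`` (illustrative only; real callers go through ``BidiRpc``):

    import queue

    from google.api_core.bidi import _RequestQueueGenerator

    q = queue.Queue()
    gen = _RequestQueueGenerator(q, period=0.1, initial_request="first")
    q.put("second")
    q.put(None)  # the same sentinel BidiRpc.close() enqueues to shut down

    # Yields items even though gen.call is None (no RPC attached yet).
    print(list(gen))  # ['first', 'second']
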
diff --git a/google/api_core/client_info.py b/google/api_core/client_info.py
index b196b7a9..f0678d24 100644
--- a/google/api_core/client_info.py
+++ b/google/api_core/client_info.py
@@ -19,15 +19,20 @@
"""
import platform
+from typing import Union
-import pkg_resources
+from google.api_core import version as api_core_version
_PY_VERSION = platform.python_version()
-_API_CORE_VERSION = pkg_resources.get_distribution("google-api-core").version
+_API_CORE_VERSION = api_core_version.__version__
+
+_GRPC_VERSION: Union[str, None]
try:
- _GRPC_VERSION = pkg_resources.get_distribution("grpcio").version
-except pkg_resources.DistributionNotFound: # pragma: NO COVER
+ import grpc
+
+ _GRPC_VERSION = grpc.__version__
+except ImportError: # pragma: NO COVER
_GRPC_VERSION = None
@@ -40,10 +45,10 @@ class ClientInfo(object):
Args:
python_version (str): The Python interpreter version, for example,
- ``'2.7.13'``.
+ ``'3.9.6'``.
grpc_version (Optional[str]): The gRPC library version.
api_core_version (str): The google-api-core library version.
- gapic_version (Optional[str]): The sversion of gapic-generated client
+ gapic_version (Optional[str]): The version of gapic-generated client
library, if the library was generated by gapic.
client_library_version (Optional[str]): The version of the client
library, generally used if the client library was not generated
@@ -52,6 +57,9 @@ class ClientInfo(object):
user_agent (Optional[str]): Prefix to the user agent header. This is
used to supply information such as application name or partner tool.
Recommended format: ``application-or-tool-ID/major.minor.version``.
+ rest_version (Optional[str]): A string with labeled versions of the
+ dependencies used for REST transport.
+ protobuf_runtime_version (Optional[str]): The protobuf runtime version.
"""
def __init__(
@@ -62,6 +70,8 @@ def __init__(
gapic_version=None,
client_library_version=None,
user_agent=None,
+ rest_version=None,
+ protobuf_runtime_version=None,
):
self.python_version = python_version
self.grpc_version = grpc_version
@@ -69,6 +79,8 @@ def __init__(
self.gapic_version = gapic_version
self.client_library_version = client_library_version
self.user_agent = user_agent
+ self.rest_version = rest_version
+ self.protobuf_runtime_version = protobuf_runtime_version
def to_user_agent(self):
"""Returns the user-agent string for this client info."""
@@ -85,6 +97,9 @@ def to_user_agent(self):
if self.grpc_version is not None:
ua += "grpc/{grpc_version} "
+ if self.rest_version is not None:
+ ua += "rest/{rest_version} "
+
ua += "gax/{api_core_version} "
if self.gapic_version is not None:
@@ -93,4 +108,7 @@ def to_user_agent(self):
if self.client_library_version is not None:
ua += "gccl/{client_library_version} "
+ if self.protobuf_runtime_version is not None:
+ ua += "pb/{protobuf_runtime_version} "
+
return ua.format(**self.__dict__).strip()
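
With the two new fields wired in, ``to_user_agent`` emits its segments in a fixed order. An illustrative sketch (all version strings below are made up):

    from google.api_core.client_info import ClientInfo

    info = ClientInfo(
        gapic_version="1.2.3",
        rest_version="requests/2.31.0",      # hypothetical label/version
        protobuf_runtime_version="4.25.0",   # hypothetical runtime version
    )
    # Prints something like:
    # gl-python/3.9.6 rest/requests/2.31.0 gax/2.24.0 gapic/1.2.3 pb/4.25.0
    # (a grpc/... segment also appears when grpcio is installed)
    print(info.to_user_agent())
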
diff --git a/google/api_core/client_logging.py b/google/api_core/client_logging.py
new file mode 100644
index 00000000..837e3e0c
--- /dev/null
+++ b/google/api_core/client_logging.py
@@ -0,0 +1,144 @@
+import logging
+import json
+import os
+
+from typing import List, Optional
+
+_LOGGING_INITIALIZED = False
+_BASE_LOGGER_NAME = "google"
+
+# Fields to be included in the StructuredLogFormatter.
+#
+# TODO(https://github.com/googleapis/python-api-core/issues/761): Update this list to support additional logging fields.
+_recognized_logging_fields = [
+ "httpRequest",
+ "rpcName",
+ "serviceName",
+ "credentialsType",
+ "credentialsInfo",
+ "universeDomain",
+ "request",
+ "response",
+ "metadata",
+ "retryAttempt",
+ "httpResponse",
+]  # Additional fields to be logged.
+
+
+def logger_configured(logger) -> bool:
+ """Determines whether `logger` has non-default configuration
+
+ Args:
+ logger: The logger to check.
+
+ Returns:
+ bool: Whether the logger has any non-default configuration.
+ """
+ return (
+ logger.handlers != [] or logger.level != logging.NOTSET or not logger.propagate
+ )
+
+
+def initialize_logging():
+ """Initializes "google" loggers, partly based on the environment variable
+
+ Initializes the "google" logger and any loggers (at the "google"
+ level or lower) specified by the environment variable
+ GOOGLE_SDK_PYTHON_LOGGING_SCOPE, as long as none of these loggers
+ were previously configured. If any such loggers (including the
+ "google" logger) are initialized, they are set to NOT propagate
+ log events up to their parent loggers.
+
+ This initialization is executed only once, and hence the
+ environment variable is only processed the first time this
+ function is called.
+ """
+ global _LOGGING_INITIALIZED
+ if _LOGGING_INITIALIZED:
+ return
+ scopes = os.getenv("GOOGLE_SDK_PYTHON_LOGGING_SCOPE", "")
+ setup_logging(scopes)
+ _LOGGING_INITIALIZED = True
+
+
+def parse_logging_scopes(scopes: Optional[str] = None) -> List[str]:
+ """Returns a list of logger names.
+
+ Splits the single string of comma-separated logger names into a list of individual logger name strings.
+
+ Args:
+ scopes: The name of a single logger. (In the future, this will be a comma-separated list of multiple loggers.)
+
+ Returns:
+ A list of all the logger names in scopes.
+ """
+ if not scopes:
+ return []
+ # TODO(https://github.com/googleapis/python-api-core/issues/759): check if the namespace is a valid namespace.
+ # TODO(b/380481951): Support logging multiple scopes.
+ # TODO(b/380483756): Raise or log a warning for an invalid scope.
+ namespaces = [scopes]
+ return namespaces
+
+
+def configure_defaults(logger):
+ """Configures `logger` to emit structured info to stdout."""
+ if not logger_configured(logger):
+ console_handler = logging.StreamHandler()
+ logger.setLevel("DEBUG")
+ logger.propagate = False
+ formatter = StructuredLogFormatter()
+ console_handler.setFormatter(formatter)
+ logger.addHandler(console_handler)
+
+
+def setup_logging(scopes: str = ""):
+ """Sets up logging for the specified `scopes`.
+
+ If the loggers specified in `scopes` have not been previously
+ configured, this will configure them to emit structured log
+ entries to stdout, and to not propagate their log events to their
+ parent loggers. Additionally, if the "google" logger (whether it
+ was specified in `scopes` or not) was not previously configured,
+ it will also configure it to not propagate log events to the root
+ logger.
+
+ Args:
+ scopes: The name of a single logger. (In the future, this will be a comma-separated list of multiple loggers.)
+
+ """
+
+ # only returns valid logger scopes (namespaces)
+ # this list has at most one element.
+ logger_names = parse_logging_scopes(scopes)
+
+ for namespace in logger_names:
+ # This will either create a module level logger or get the reference of the base logger instantiated above.
+ logger = logging.getLogger(namespace)
+
+ # Configure default settings.
+ configure_defaults(logger)
+
+ # disable log propagation at base logger level to the root logger only if a base logger is not already configured via code changes.
+ base_logger = logging.getLogger(_BASE_LOGGER_NAME)
+ if not logger_configured(base_logger):
+ base_logger.propagate = False
+
+
+# TODO(https://github.com/googleapis/python-api-core/issues/763): Expand documentation.
+class StructuredLogFormatter(logging.Formatter):
+ # TODO(https://github.com/googleapis/python-api-core/issues/761): ensure that additional fields such as
+ # function name, file name, and line no. appear in a log output.
+ def format(self, record: logging.LogRecord):
+ log_obj = {
+ "timestamp": self.formatTime(record),
+ "severity": record.levelname,
+ "name": record.name,
+ "message": record.getMessage(),
+ }
+
+ for field_name in _recognized_logging_fields:
+ value = getattr(record, field_name, None)
+ if value is not None:
+ log_obj[field_name] = value
+ return json.dumps(log_obj)
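
A runnable sketch of opting in to these structured logs; the scope value and the extra field are illustrative, and ``rpcName`` is one of the recognized fields listed above:

    import logging
    import os

    # Must be set before the first initialize_logging() call.
    os.environ["GOOGLE_SDK_PYTHON_LOGGING_SCOPE"] = "google"

    from google.api_core import client_logging

    client_logging.initialize_logging()
    logging.getLogger("google").debug("hello", extra={"rpcName": "ExampleMethod"})
    # Emits one JSON log line like: {"timestamp": ..., "severity": "DEBUG",
    # "name": "google", "message": "hello", "rpcName": "ExampleMethod"}
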
diff --git a/google/api_core/client_options.py b/google/api_core/client_options.py
index 57000e95..d11665d2 100644
--- a/google/api_core/client_options.py
+++ b/google/api_core/client_options.py
@@ -48,6 +48,8 @@ def get_client_cert():
"""
+from typing import Callable, Mapping, Optional, Sequence, Tuple
+
class ClientOptions(object):
"""Client Options used to set options on clients.
@@ -55,53 +57,88 @@ class ClientOptions(object):
Args:
api_endpoint (Optional[str]): The desired API endpoint, e.g.,
compute.googleapis.com
- client_cert_source (Optional[Callable[[], (bytes, bytes)]]): A callback
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback
which returns client certificate bytes and private key bytes both in
PEM format. ``client_cert_source`` and ``client_encrypted_cert_source``
are mutually exclusive.
- client_encrypted_cert_source (Optional[Callable[[], (str, str, bytes)]]):
+ client_encrypted_cert_source (Optional[Callable[[], Tuple[str, str, bytes]]]):
A callback which returns client certificate file path, encrypted
private key file path, and the passphrase bytes.``client_cert_source``
and ``client_encrypted_cert_source`` are mutually exclusive.
quota_project_id (Optional[str]): A project name that a client's
quota belongs to.
credentials_file (Optional[str]): A path to a file storing credentials.
+            ``credentials_file`` and ``api_key`` are mutually exclusive.
+
+ .. warning::
+ Important: If you accept a credential configuration (credential JSON/File/Stream)
+ from an external source for authentication to Google Cloud Platform, you must
+ validate it before providing it to any Google API or client library. Providing an
+ unvalidated credential configuration to Google APIs or libraries can compromise
+ the security of your systems and data. For more information, refer to
+ `Validate credential configurations from external sources`_.
+
+ .. _Validate credential configurations from external sources:
+
+ https://cloud.google.com/docs/authentication/external/externally-sourced-credentials
scopes (Optional[Sequence[str]]): OAuth access token override scopes.
+ api_key (Optional[str]): Google API key. ``credentials_file`` and
+ ``api_key`` are mutually exclusive.
+ api_audience (Optional[str]): The intended audience for the API calls
+ to the service that will be set when using certain 3rd party
+ authentication flows. Audience is typically a resource identifier.
+ If not set, the service endpoint value will be used as a default.
+ An example of a valid ``api_audience`` is: "https://language.googleapis.com".
+ universe_domain (Optional[str]): The desired universe domain. This must match
+ the one in credentials. If not set, the default universe domain is
+ `googleapis.com`. If both `api_endpoint` and `universe_domain` are set,
+ then `api_endpoint` is used as the service endpoint. If `api_endpoint` is
+ not specified, the format will be `{service}.{universe_domain}`.
Raises:
ValueError: If both ``client_cert_source`` and ``client_encrypted_cert_source``
- are provided.
+ are provided, or both ``credentials_file`` and ``api_key`` are provided.
"""
def __init__(
self,
- api_endpoint=None,
- client_cert_source=None,
- client_encrypted_cert_source=None,
- quota_project_id=None,
- credentials_file=None,
- scopes=None,
+ api_endpoint: Optional[str] = None,
+ client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+ client_encrypted_cert_source: Optional[
+ Callable[[], Tuple[str, str, bytes]]
+ ] = None,
+ quota_project_id: Optional[str] = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ api_key: Optional[str] = None,
+ api_audience: Optional[str] = None,
+ universe_domain: Optional[str] = None,
):
if client_cert_source and client_encrypted_cert_source:
raise ValueError(
"client_cert_source and client_encrypted_cert_source are mutually exclusive"
)
+ if api_key and credentials_file:
+ raise ValueError("api_key and credentials_file are mutually exclusive")
self.api_endpoint = api_endpoint
self.client_cert_source = client_cert_source
self.client_encrypted_cert_source = client_encrypted_cert_source
self.quota_project_id = quota_project_id
self.credentials_file = credentials_file
self.scopes = scopes
+ self.api_key = api_key
+ self.api_audience = api_audience
+ self.universe_domain = universe_domain
- def __repr__(self):
+ def __repr__(self) -> str:
return "ClientOptions: " + repr(self.__dict__)
-def from_dict(options):
+def from_dict(options: Mapping[str, object]) -> ClientOptions:
"""Construct a client options object from a mapping object.
Args:
- options (six.moves.collections_abc.Mapping): A mapping object with client options.
+ options (collections.abc.Mapping): A mapping object with client options.
See the docstring for ClientOptions for details on valid arguments.
"""
diff --git a/google/api_core/datetime_helpers.py b/google/api_core/datetime_helpers.py
index e52fb1dd..c3792300 100644
--- a/google/api_core/datetime_helpers.py
+++ b/google/api_core/datetime_helpers.py
@@ -18,12 +18,10 @@
import datetime
import re
-import pytz
-
from google.protobuf import timestamp_pb2
-_UTC_EPOCH = datetime.datetime.utcfromtimestamp(0).replace(tzinfo=pytz.utc)
+_UTC_EPOCH = datetime.datetime(1970, 1, 1, tzinfo=datetime.timezone.utc)
_RFC3339_MICROS = "%Y-%m-%dT%H:%M:%S.%fZ"
_RFC3339_NO_FRACTION = "%Y-%m-%dT%H:%M:%S"
# datetime.strptime cannot handle nanosecond precision: parse w/ regex
@@ -44,7 +42,7 @@
def utcnow():
"""A :meth:`datetime.datetime.utcnow()` alias to allow mocking in tests."""
- return datetime.datetime.utcnow()
+ return datetime.datetime.now(tz=datetime.timezone.utc).replace(tzinfo=None)
def to_milliseconds(value):
@@ -83,9 +81,9 @@ def to_microseconds(value):
int: Microseconds since the unix epoch.
"""
if not value.tzinfo:
- value = value.replace(tzinfo=pytz.utc)
+ value = value.replace(tzinfo=datetime.timezone.utc)
# Regardless of what timezone is on the value, convert it to UTC.
- value = value.astimezone(pytz.utc)
+ value = value.astimezone(datetime.timezone.utc)
# Convert the datetime to a microsecond timestamp.
return int(calendar.timegm(value.timetuple()) * 1e6) + value.microsecond
@@ -153,10 +151,10 @@ def from_rfc3339(value):
micros = 0
else:
scale = 9 - len(fraction)
- nanos = int(fraction) * (10 ** scale)
+ nanos = int(fraction) * (10**scale)
micros = nanos // 1000
- return bare_seconds.replace(microsecond=micros, tzinfo=pytz.utc)
+ return bare_seconds.replace(microsecond=micros, tzinfo=datetime.timezone.utc)
from_rfc3339_nanos = from_rfc3339 # from_rfc3339_nanos method was deprecated.
@@ -172,7 +170,7 @@ def to_rfc3339(value, ignore_zone=True):
datetime object is ignored and the datetime is treated as UTC.
Returns:
- str: The RFC3339 formated string representing the datetime.
+ str: The RFC3339 formatted string representing the datetime.
"""
if not ignore_zone and value.tzinfo is not None:
# Convert to UTC and remove the time zone info.
@@ -247,7 +245,7 @@ def from_rfc3339(cls, stamp):
nanos = 0
else:
scale = 9 - len(fraction)
- nanos = int(fraction) * (10 ** scale)
+ nanos = int(fraction) * (10**scale)
return cls(
bare.year,
bare.month,
@@ -256,7 +254,7 @@ def from_rfc3339(cls, stamp):
bare.minute,
bare.second,
nanosecond=nanos,
- tzinfo=pytz.UTC,
+ tzinfo=datetime.timezone.utc,
)
def timestamp_pb(self):
@@ -265,7 +263,11 @@ def timestamp_pb(self):
Returns:
(:class:`~google.protobuf.timestamp_pb2.Timestamp`): Timestamp message
"""
- inst = self if self.tzinfo is not None else self.replace(tzinfo=pytz.UTC)
+ inst = (
+ self
+ if self.tzinfo is not None
+ else self.replace(tzinfo=datetime.timezone.utc)
+ )
delta = inst - _UTC_EPOCH
seconds = int(delta.total_seconds())
nanos = self._nanosecond or self.microsecond * 1000
@@ -292,5 +294,5 @@ def from_timestamp_pb(cls, stamp):
bare.minute,
bare.second,
nanosecond=stamp.nanos,
- tzinfo=pytz.UTC,
+ tzinfo=datetime.timezone.utc,
)
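
The ``pytz`` removal swaps in the standard library's fixed UTC offset everywhere. A small runnable check (timestamps are arbitrary):

    import datetime

    from google.api_core import datetime_helpers

    dt = datetime_helpers.from_rfc3339("2024-01-01T00:00:00.000001Z")
    print(dt.tzinfo is datetime.timezone.utc)  # True -- no pytz involved
    print(datetime_helpers.to_rfc3339(datetime.datetime(2024, 1, 1)))
    # 2024-01-01T00:00:00.000000Z
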
diff --git a/google/api_core/exceptions.py b/google/api_core/exceptions.py
index b9c46ca0..e3eb696c 100644
--- a/google/api_core/exceptions.py
+++ b/google/api_core/exceptions.py
@@ -21,18 +21,44 @@
from __future__ import absolute_import
from __future__ import unicode_literals
-import six
-from six.moves import http_client
+import http.client
+from typing import Optional, Dict
+from typing import Union
+import warnings
+
+from google.rpc import error_details_pb2
+
+
+def _warn_could_not_import_grpcio_status():
+ warnings.warn(
+ "Please install grpcio-status to obtain helpful grpc error messages.",
+ ImportWarning,
+ ) # pragma: NO COVER
+
try:
import grpc
+
+ try:
+ from grpc_status import rpc_status
+ except ImportError: # pragma: NO COVER
+ _warn_could_not_import_grpcio_status()
+ rpc_status = None
except ImportError: # pragma: NO COVER
grpc = None
# Lookup tables for mapping exceptions from HTTP and gRPC transports.
-# Populated by _APICallErrorMeta
-_HTTP_CODE_TO_EXCEPTION = {}
-_GRPC_CODE_TO_EXCEPTION = {}
+# Populated by _GoogleAPICallErrorMeta
+_HTTP_CODE_TO_EXCEPTION: Dict[int, Exception] = {}
+_GRPC_CODE_TO_EXCEPTION: Dict[int, Exception] = {}
+
+# Additional lookup table to map integer status codes to grpc status code
+# grpc does not currently support initializing enums from ints
+# i.e., grpc.StatusCode(5) raises an error
+_INT_TO_GRPC_CODE = {}
+if grpc is not None: # pragma: no branch
+ for x in grpc.StatusCode:
+ _INT_TO_GRPC_CODE[x.value[0]] = x
class GoogleAPIError(Exception):
@@ -47,13 +73,12 @@ class DuplicateCredentialArgs(GoogleAPIError):
pass
-@six.python_2_unicode_compatible
class RetryError(GoogleAPIError):
"""Raised when a function has exhausted all of its available retries.
Args:
message (str): The exception message.
- cause (Exception): The last exception raised when retring the
+ cause (Exception): The last exception raised when retrying the
function.
"""
@@ -83,19 +108,20 @@ def __new__(mcs, name, bases, class_dict):
return cls
-@six.python_2_unicode_compatible
-@six.add_metaclass(_GoogleAPICallErrorMeta)
-class GoogleAPICallError(GoogleAPIError):
+class GoogleAPICallError(GoogleAPIError, metaclass=_GoogleAPICallErrorMeta):
"""Base class for exceptions raised by calling API methods.
Args:
message (str): The exception message.
errors (Sequence[Any]): An optional list of error details.
+ details (Sequence[Any]): An optional list of objects defined in google.rpc.error_details.
response (Union[requests.Request, grpc.Call]): The response or
gRPC call metadata.
+ error_info (Union[error_details_pb2.ErrorInfo, None]): An optional object containing error info
+ (google.rpc.error_details.ErrorInfo).
"""
- code = None
+ code: Union[int, None] = None
"""Optional[int]: The HTTP status code associated with this error.
This may be ``None`` if the exception does not have a direct mapping
@@ -111,15 +137,67 @@ class GoogleAPICallError(GoogleAPIError):
This may be ``None`` if the exception does not match up to a gRPC error.
"""
- def __init__(self, message, errors=(), response=None):
+ def __init__(self, message, errors=(), details=(), response=None, error_info=None):
super(GoogleAPICallError, self).__init__(message)
self.message = message
"""str: The exception message."""
self._errors = errors
+ self._details = details
self._response = response
+ self._error_info = error_info
def __str__(self):
- return "{} {}".format(self.code, self.message)
+ error_msg = "{} {}".format(self.code, self.message)
+ if self.details:
+ error_msg = "{} {}".format(error_msg, self.details)
+ # Note: This else condition can be removed once proposal A from
+ # b/284179390 is implemented.
+ else:
+ if self.errors:
+ errors = [
+ f"{error.code}: {error.message}"
+ for error in self.errors
+ if hasattr(error, "code") and hasattr(error, "message")
+ ]
+ if errors:
+ error_msg = "{} {}".format(error_msg, "\n".join(errors))
+ return error_msg
+
+ @property
+ def reason(self):
+ """The reason of the error.
+
+ Reference:
+ https://github.com/googleapis/googleapis/blob/master/google/rpc/error_details.proto#L112
+
+ Returns:
+ Union[str, None]: An optional string containing reason of the error.
+ """
+ return self._error_info.reason if self._error_info else None
+
+ @property
+ def domain(self):
+ """The logical grouping to which the "reason" belongs.
+
+ Reference:
+ https://github.com/googleapis/googleapis/blob/master/google/rpc/error_details.proto#L112
+
+ Returns:
+ Union[str, None]: An optional string containing a logical grouping to which the "reason" belongs.
+ """
+ return self._error_info.domain if self._error_info else None
+
+ @property
+ def metadata(self):
+ """Additional structured details about this error.
+
+ Reference:
+ https://github.com/googleapis/googleapis/blob/master/google/rpc/error_details.proto#L112
+
+ Returns:
+ Union[Dict[str, str], None]: An optional object containing structured details about the error.
+ """
+ return self._error_info.metadata if self._error_info else None
@property
def errors(self):
@@ -130,6 +208,19 @@ def errors(self):
"""
return list(self._errors)
+ @property
+ def details(self):
+ """Information contained in google.rpc.status.details.
+
+ Reference:
+ https://github.com/googleapis/googleapis/blob/master/google/rpc/status.proto
+ https://github.com/googleapis/googleapis/blob/master/google/rpc/error_details.proto
+
+ Returns:
+ Sequence[Any]: A list of structured objects from error_details.proto
+ """
+ return list(self._details)
+
@property
def response(self):
"""Optional[Union[requests.Request, grpc.Call]]: The response or
@@ -144,25 +235,25 @@ class Redirection(GoogleAPICallError):
class MovedPermanently(Redirection):
"""Exception mapping a ``301 Moved Permanently`` response."""
- code = http_client.MOVED_PERMANENTLY
+ code = http.client.MOVED_PERMANENTLY
class NotModified(Redirection):
"""Exception mapping a ``304 Not Modified`` response."""
- code = http_client.NOT_MODIFIED
+ code = http.client.NOT_MODIFIED
class TemporaryRedirect(Redirection):
"""Exception mapping a ``307 Temporary Redirect`` response."""
- code = http_client.TEMPORARY_REDIRECT
+ code = http.client.TEMPORARY_REDIRECT
class ResumeIncomplete(Redirection):
"""Exception mapping a ``308 Resume Incomplete`` response.
- .. note:: :attr:`http_client.PERMANENT_REDIRECT` is ``308``, but Google
+ .. note:: :attr:`http.client.PERMANENT_REDIRECT` is ``308``, but Google
APIs differ in their use of this status code.
"""
@@ -176,7 +267,7 @@ class ClientError(GoogleAPICallError):
class BadRequest(ClientError):
"""Exception mapping a ``400 Bad Request`` response."""
- code = http_client.BAD_REQUEST
+ code = http.client.BAD_REQUEST
class InvalidArgument(BadRequest):
@@ -201,7 +292,7 @@ class OutOfRange(BadRequest):
class Unauthorized(ClientError):
"""Exception mapping a ``401 Unauthorized`` response."""
- code = http_client.UNAUTHORIZED
+ code = http.client.UNAUTHORIZED
class Unauthenticated(Unauthorized):
@@ -213,7 +304,7 @@ class Unauthenticated(Unauthorized):
class Forbidden(ClientError):
"""Exception mapping a ``403 Forbidden`` response."""
- code = http_client.FORBIDDEN
+ code = http.client.FORBIDDEN
class PermissionDenied(Forbidden):
@@ -226,20 +317,20 @@ class NotFound(ClientError):
"""Exception mapping a ``404 Not Found`` response or a
:attr:`grpc.StatusCode.NOT_FOUND` error."""
- code = http_client.NOT_FOUND
+ code = http.client.NOT_FOUND
grpc_status_code = grpc.StatusCode.NOT_FOUND if grpc is not None else None
class MethodNotAllowed(ClientError):
"""Exception mapping a ``405 Method Not Allowed`` response."""
- code = http_client.METHOD_NOT_ALLOWED
+ code = http.client.METHOD_NOT_ALLOWED
class Conflict(ClientError):
"""Exception mapping a ``409 Conflict`` response."""
- code = http_client.CONFLICT
+ code = http.client.CONFLICT
class AlreadyExists(Conflict):
@@ -257,26 +348,25 @@ class Aborted(Conflict):
class LengthRequired(ClientError):
"""Exception mapping a ``411 Length Required`` response."""
- code = http_client.LENGTH_REQUIRED
+ code = http.client.LENGTH_REQUIRED
class PreconditionFailed(ClientError):
"""Exception mapping a ``412 Precondition Failed`` response."""
- code = http_client.PRECONDITION_FAILED
+ code = http.client.PRECONDITION_FAILED
class RequestRangeNotSatisfiable(ClientError):
"""Exception mapping a ``416 Request Range Not Satisfiable`` response."""
- code = http_client.REQUESTED_RANGE_NOT_SATISFIABLE
+ code = http.client.REQUESTED_RANGE_NOT_SATISFIABLE
class TooManyRequests(ClientError):
"""Exception mapping a ``429 Too Many Requests`` response."""
- # http_client does not define a constant for this in Python 2.
- code = 429
+ code = http.client.TOO_MANY_REQUESTS
class ResourceExhausted(TooManyRequests):
@@ -289,8 +379,7 @@ class Cancelled(ClientError):
"""Exception mapping a :attr:`grpc.StatusCode.CANCELLED` error."""
# This maps to HTTP status code 499. See
- # https://github.com/googleapis/googleapis/blob/master/google/rpc\
- # /code.proto
+ # https://github.com/googleapis/googleapis/blob/master/google/rpc/code.proto
code = 499
grpc_status_code = grpc.StatusCode.CANCELLED if grpc is not None else None
@@ -303,7 +392,7 @@ class InternalServerError(ServerError):
"""Exception mapping a ``500 Internal Server Error`` response. or a
:attr:`grpc.StatusCode.INTERNAL` error."""
- code = http_client.INTERNAL_SERVER_ERROR
+ code = http.client.INTERNAL_SERVER_ERROR
grpc_status_code = grpc.StatusCode.INTERNAL if grpc is not None else None
@@ -323,28 +412,28 @@ class MethodNotImplemented(ServerError):
"""Exception mapping a ``501 Not Implemented`` response or a
:attr:`grpc.StatusCode.UNIMPLEMENTED` error."""
- code = http_client.NOT_IMPLEMENTED
+ code = http.client.NOT_IMPLEMENTED
grpc_status_code = grpc.StatusCode.UNIMPLEMENTED if grpc is not None else None
class BadGateway(ServerError):
"""Exception mapping a ``502 Bad Gateway`` response."""
- code = http_client.BAD_GATEWAY
+ code = http.client.BAD_GATEWAY
class ServiceUnavailable(ServerError):
"""Exception mapping a ``503 Service Unavailable`` response or a
:attr:`grpc.StatusCode.UNAVAILABLE` error."""
- code = http_client.SERVICE_UNAVAILABLE
+ code = http.client.SERVICE_UNAVAILABLE
grpc_status_code = grpc.StatusCode.UNAVAILABLE if grpc is not None else None
class GatewayTimeout(ServerError):
"""Exception mapping a ``504 Gateway Timeout`` response."""
- code = http_client.GATEWAY_TIMEOUT
+ code = http.client.GATEWAY_TIMEOUT
class DeadlineExceeded(GatewayTimeout):
@@ -353,6 +442,12 @@ class DeadlineExceeded(GatewayTimeout):
grpc_status_code = grpc.StatusCode.DEADLINE_EXCEEDED if grpc is not None else None
+class AsyncRestUnsupportedParameterError(NotImplementedError):
+ """Raised when an unsupported parameter is configured against async rest transport."""
+
+ pass
+
+
def exception_class_for_http_status(status_code):
"""Return the exception class for a specific HTTP status code.
@@ -387,6 +482,62 @@ def from_http_status(status_code, message, **kwargs):
return error
+def _format_rest_error_message(error, method, url):
+ method = method.upper() if method else None
+ message = "{method} {url}: {error}".format(
+ method=method,
+ url=url,
+ error=error,
+ )
+ return message
+
+
+# NOTE: We're moving away from `from_http_response` because it expects a `requests.Response`
+# object, whereas `format_http_response_error` expects a more abstract response from google.auth
+# and is compatible with both sync and async response types.
+# TODO(https://github.com/googleapis/python-api-core/issues/691): Add type hint for response.
+def format_http_response_error(
+ response, method: str, url: str, payload: Optional[Dict] = None
+):
+ """Create a :class:`GoogleAPICallError` from a google auth rest response.
+
+ Args:
+        response (Union[google.auth.transport.Response, google.auth.aio.transport.Response]): The HTTP response.
+        method (Optional[str]): The HTTP request method.
+        url (Optional[str]): The HTTP request url.
+        payload (Optional[dict]): The HTTP response payload. If not passed in, it is read from response for a response type of google.auth.transport.Response.
+
+ Returns:
+ GoogleAPICallError: An instance of the appropriate subclass of
+ :class:`GoogleAPICallError`, with the message and errors populated
+ from the response.
+ """
+ payload = {} if not payload else payload
+ error_message = payload.get("error", {}).get("message", "unknown error")
+ errors = payload.get("error", {}).get("errors", ())
+ # In JSON, details are already formatted in developer-friendly way.
+ details = payload.get("error", {}).get("details", ())
+ error_info_list = list(
+ filter(
+ lambda detail: detail.get("@type", "")
+ == "type.googleapis.com/google.rpc.ErrorInfo",
+ details,
+ )
+ )
+ error_info = error_info_list[0] if error_info_list else None
+ message = _format_rest_error_message(error_message, method, url)
+
+ exception = from_http_status(
+ response.status_code,
+ message,
+ errors=errors,
+ details=details,
+ response=response,
+ error_info=error_info,
+ )
+ return exception
+
+
def from_http_response(response):
"""Create a :class:`GoogleAPICallError` from a :class:`requests.Response`.
@@ -402,18 +553,9 @@ def from_http_response(response):
payload = response.json()
except ValueError:
payload = {"error": {"message": response.text or "unknown error"}}
-
- error_message = payload.get("error", {}).get("message", "unknown error")
- errors = payload.get("error", {}).get("errors", ())
-
- message = "{method} {url}: {error}".format(
- method=response.request.method, url=response.request.url, error=error_message
- )
-
- exception = from_http_status(
- response.status_code, message, errors=errors, response=response
+ return format_http_response_error(
+ response, response.request.method, response.request.url, payload
)
- return exception
def exception_class_for_grpc_status(status_code):
@@ -432,7 +574,7 @@ def from_grpc_status(status_code, message, **kwargs):
"""Create a :class:`GoogleAPICallError` from a :class:`grpc.StatusCode`.
Args:
- status_code (grpc.StatusCode): The gRPC status code.
+ status_code (Union[grpc.StatusCode, int]): The gRPC status code.
message (str): The exception message.
kwargs: Additional arguments passed to the :class:`GoogleAPICallError`
constructor.
@@ -441,6 +583,10 @@ def from_grpc_status(status_code, message, **kwargs):
GoogleAPICallError: An instance of the appropriate subclass of
:class:`GoogleAPICallError`.
"""
+
+ if isinstance(status_code, int):
+ status_code = _INT_TO_GRPC_CODE.get(status_code, status_code)
+
error_class = exception_class_for_grpc_status(status_code)
error = error_class(message, **kwargs)
@@ -454,6 +600,48 @@ def _is_informative_grpc_error(rpc_exc):
return hasattr(rpc_exc, "code") and hasattr(rpc_exc, "details")
+def _parse_grpc_error_details(rpc_exc):
+ if not rpc_status: # pragma: NO COVER
+ _warn_could_not_import_grpcio_status()
+ return [], None
+ try:
+ status = rpc_status.from_call(rpc_exc)
+ except NotImplementedError: # workaround
+ return [], None
+
+ if not status:
+ return [], None
+
+ possible_errors = [
+ error_details_pb2.BadRequest,
+ error_details_pb2.PreconditionFailure,
+ error_details_pb2.QuotaFailure,
+ error_details_pb2.ErrorInfo,
+ error_details_pb2.RetryInfo,
+ error_details_pb2.ResourceInfo,
+ error_details_pb2.RequestInfo,
+ error_details_pb2.DebugInfo,
+ error_details_pb2.Help,
+ error_details_pb2.LocalizedMessage,
+ ]
+ error_info = None
+ error_details = []
+ for detail in status.details:
+ matched_detail_cls = list(
+ filter(lambda x: detail.Is(x.DESCRIPTOR), possible_errors)
+ )
+ # If nothing matched, use detail directly.
+ if len(matched_detail_cls) == 0:
+ info = detail
+ else:
+ info = matched_detail_cls[0]()
+ detail.Unpack(info)
+ error_details.append(info)
+ if isinstance(info, error_details_pb2.ErrorInfo):
+ error_info = info
+ return error_details, error_info
+
+
def from_grpc_error(rpc_exc):
"""Create a :class:`GoogleAPICallError` from a :class:`grpc.RpcError`.
@@ -466,9 +654,17 @@ def from_grpc_error(rpc_exc):
"""
# NOTE(lidiz) All gRPC error shares the parent class grpc.RpcError.
# However, check for grpc.RpcError breaks backward compatibility.
- if isinstance(rpc_exc, grpc.Call) or _is_informative_grpc_error(rpc_exc):
+ if (
+ grpc is not None and isinstance(rpc_exc, grpc.Call)
+ ) or _is_informative_grpc_error(rpc_exc):
+ details, err_info = _parse_grpc_error_details(rpc_exc)
return from_grpc_status(
- rpc_exc.code(), rpc_exc.details(), errors=(rpc_exc,), response=rpc_exc
+ rpc_exc.code(),
+ rpc_exc.details(),
+ errors=(rpc_exc,),
+ details=details,
+ response=rpc_exc,
+ error_info=err_info,
)
else:
return GoogleAPICallError(str(rpc_exc), errors=(rpc_exc,), response=rpc_exc)
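A minimal sketch (not part of the patch, and assuming ``grpcio`` is installed) exercising the integer-code support added to ``from_grpc_status`` and the new error-details properties:

.. code-block:: python

    from google.api_core import exceptions

    # 5 is the integer value of grpc.StatusCode.NOT_FOUND; the patched
    # from_grpc_status now maps bare ints through _INT_TO_GRPC_CODE.
    exc = exceptions.from_grpc_status(5, "resource missing")
    assert isinstance(exc, exceptions.NotFound)
    assert exc.code == 404  # the same class also carries the HTTP mapping

    # reason/domain/metadata are populated only when an ErrorInfo detail
    # is parsed (e.g. by from_grpc_error or format_http_response_error);
    # here they fall back to None and details to an empty list.
    print(exc.reason, exc.domain, exc.metadata, exc.details)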
diff --git a/google/api_core/extended_operation.py b/google/api_core/extended_operation.py
new file mode 100644
index 00000000..d474632b
--- /dev/null
+++ b/google/api_core/extended_operation.py
@@ -0,0 +1,225 @@
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Futures for extended long-running operations returned from Google Cloud APIs.
+
+These futures can be used to synchronously wait for the result of a
+long-running operation using :meth:`ExtendedOperation.result`:
+
+.. code-block:: python
+
+ extended_operation = my_api_client.long_running_method()
+
+ extended_operation.result()
+
+Or asynchronously using callbacks and :meth:`ExtendedOperation.add_done_callback`:
+
+.. code-block:: python
+
+ extended_operation = my_api_client.long_running_method()
+
+ def my_callback(ex_op):
+ print(f"Operation {ex_op.name} completed")
+
+ extended_operation.add_done_callback(my_callback)
+
+"""
+
+import threading
+
+from google.api_core import exceptions
+from google.api_core.future import polling
+
+
+class ExtendedOperation(polling.PollingFuture):
+ """An ExtendedOperation future for interacting with a Google API Long-Running Operation.
+
+ Args:
+ extended_operation (proto.Message): The initial operation.
+ refresh (Callable[[], type(extended_operation)]): A callable that returns
+ the latest state of the operation.
+ cancel (Callable[[], None]): A callable that tries to cancel the operation.
+        polling (Optional[google.api_core.retry.Retry]): The configuration used
+ for polling. This can be used to control how often :meth:`done`
+ is polled. If the ``timeout`` argument to :meth:`result` is
+ specified it will override the ``polling.timeout`` property.
+        retry (Optional[google.api_core.retry.Retry]): DEPRECATED use ``polling``
+ instead. If specified it will override ``polling`` parameter to
+ maintain backward compatibility.
+
+    Note: Most long-running API methods use google.api_core.operation.Operation.
+ This class is a wrapper for a subset of methods that use alternative
+ Long-Running Operation (LRO) semantics.
+
+ Note: there is not a concrete type the extended operation must be.
+ It MUST have fields that correspond to the following, POSSIBLY WITH DIFFERENT NAMES:
+ * name: str
+ * status: Union[str, bool, enum.Enum]
+ * error_code: int
+ * error_message: str
+ """
+
+ def __init__(
+ self,
+ extended_operation,
+ refresh,
+ cancel,
+ polling=polling.DEFAULT_POLLING,
+ **kwargs,
+ ):
+ super().__init__(polling=polling, **kwargs)
+ self._extended_operation = extended_operation
+ self._refresh = refresh
+ self._cancel = cancel
+ # Note: the extended operation does not give a good way to indicate cancellation.
+ # We make do with manually tracking cancellation and checking for doneness.
+ self._cancelled = False
+ self._completion_lock = threading.Lock()
+ # Invoke in case the operation came back already complete.
+ self._handle_refreshed_operation()
+
+ # Note: the following four properties MUST be overridden in a subclass
+ # if, and only if, the fields in the corresponding extended operation message
+ # have different names.
+ #
+ # E.g. we have an extended operation class that looks like
+ #
+ # class MyOperation(proto.Message):
+ # moniker = proto.Field(proto.STRING, number=1)
+ # status_msg = proto.Field(proto.STRING, number=2)
+ # optional http_error_code = proto.Field(proto.INT32, number=3)
+ # optional http_error_msg = proto.Field(proto.STRING, number=4)
+ #
+ # the ExtendedOperation subclass would provide property overrides that map
+ # to these (poorly named) fields.
+ @property
+ def name(self):
+ return self._extended_operation.name
+
+ @property
+ def status(self):
+ return self._extended_operation.status
+
+ @property
+ def error_code(self):
+ return self._extended_operation.error_code
+
+ @property
+ def error_message(self):
+ return self._extended_operation.error_message
+
+ def __getattr__(self, name):
+ return getattr(self._extended_operation, name)
+
+ def done(self, retry=None):
+ self._refresh_and_update(retry)
+ return self._extended_operation.done
+
+ def cancel(self):
+ if self.done():
+ return False
+
+ self._cancel()
+ self._cancelled = True
+ return True
+
+ def cancelled(self):
+ # TODO(dovs): there is not currently a good way to determine whether the
+ # operation has been cancelled.
+ # The best we can do is manually keep track of cancellation
+ # and check for doneness.
+ if not self._cancelled:
+ return False
+
+ self._refresh_and_update()
+ return self._extended_operation.done
+
+ def _refresh_and_update(self, retry=None):
+ if not self._extended_operation.done:
+ self._extended_operation = (
+ self._refresh(retry=retry) if retry else self._refresh()
+ )
+ self._handle_refreshed_operation()
+
+ def _handle_refreshed_operation(self):
+ with self._completion_lock:
+ if not self._extended_operation.done:
+ return
+
+ if self.error_code and self.error_message:
+ # Note: `errors` can be removed once proposal A from
+ # b/284179390 is implemented.
+ errors = []
+ if hasattr(self, "error") and hasattr(self.error, "errors"):
+ errors = self.error.errors
+ exception = exceptions.from_http_status(
+ status_code=self.error_code,
+ message=self.error_message,
+ response=self._extended_operation,
+ errors=errors,
+ )
+ self.set_exception(exception)
+ elif self.error_code or self.error_message:
+ exception = exceptions.GoogleAPICallError(
+ f"Unexpected error {self.error_code}: {self.error_message}"
+ )
+ self.set_exception(exception)
+ else:
+ # Extended operations have no payload.
+ self.set_result(None)
+
+ @classmethod
+ def make(cls, refresh, cancel, extended_operation, **kwargs):
+ """
+ Return an instantiated ExtendedOperation (or child) that wraps
+ * a refresh callable
+ * a cancel callable (can be a no-op)
+ * an initial result
+
+ .. note::
+ It is the caller's responsibility to set up refresh and cancel
+ with their correct request argument.
+ The reason for this is that the services that use Extended Operations
+        have RPCs that look something like the following:
+
+ // service.proto
+ service MyLongService {
+ rpc StartLongTask(StartLongTaskRequest) returns (ExtendedOperation) {
+ option (google.cloud.operation_service) = "CustomOperationService";
+ }
+ }
+
+ service CustomOperationService {
+ rpc Get(GetOperationRequest) returns (ExtendedOperation) {
+ option (google.cloud.operation_polling_method) = true;
+ }
+ }
+
+ Any info needed for the poll, e.g. a name, path params, etc.
+ is held in the request, which the initial client method is in a much
+        better position to make because the caller made the initial request.
+
+ TL;DR: the caller sets up closures for refresh and cancel that carry
+ the properly configured requests.
+
+ Args:
+ refresh (Callable[Optional[Retry]][type(extended_operation)]): A callable that
+ returns the latest state of the operation.
+ cancel (Callable[][Any]): A callable that tries to cancel the operation
+ on a best effort basis.
+ extended_operation (Any): The initial response of the long running method.
+ See the docstring for ExtendedOperation.__init__ for requirements on
+ the type and fields of extended_operation
+ """
+ return cls(extended_operation, refresh, cancel, **kwargs)
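As a usage illustration only (not part of the patch), here is a self-contained toy that satisfies the four-field contract described above and drives ``ExtendedOperation.make`` through two polls:

.. code-block:: python

    import types

    from google.api_core.extended_operation import ExtendedOperation

    # Stand-in for the proto message a real service would return; only
    # the fields ExtendedOperation actually reads are provided here.
    def make_op(done):
        return types.SimpleNamespace(
            name="operations/123",
            status="DONE" if done else "RUNNING",
            error_code=None,
            error_message=None,
            done=done,
        )

    polls = iter([make_op(False), make_op(True)])

    def refresh(retry=None):
        # Closure carrying whatever request state the poll needs.
        return next(polls)

    def cancel():
        pass  # best-effort no-op for this sketch

    ex_op = ExtendedOperation.make(refresh, cancel, make_op(False))
    ex_op.result(timeout=60)  # polls via `refresh`, sleeping between tries
    print(ex_op.name)  # attribute access delegates to the wrapped message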
diff --git a/google/api_core/future/async_future.py b/google/api_core/future/async_future.py
index e1d158d0..325ee9cd 100644
--- a/google/api_core/future/async_future.py
+++ b/google/api_core/future/async_future.py
@@ -24,6 +24,7 @@
class _OperationNotComplete(Exception):
"""Private exception used for polling via retry."""
+
pass
@@ -42,8 +43,10 @@ class AsyncFuture(base.Future):
The :meth:`done` method should be implemented by subclasses. The polling
behavior will repeatedly call ``done`` until it returns True.
- .. note: Privacy here is intended to prevent the final class from
- overexposing, not to prevent subclasses from accessing methods.
+ .. note::
+
+ Privacy here is intended to prevent the final class from
+ overexposing, not to prevent subclasses from accessing methods.
Args:
retry (google.api_core.retry.Retry): The retry configuration used
@@ -92,7 +95,7 @@ async def _blocking_poll(self, timeout=None):
if self._future.done():
return
- retry_ = self._retry.with_deadline(timeout)
+ retry_ = self._retry.with_timeout(timeout)
try:
await retry_(self._done_or_raise)()
@@ -145,7 +148,9 @@ def add_done_callback(self, fn):
is complete.
"""
if self._background_task is None:
- self._background_task = asyncio.get_event_loop().create_task(self._blocking_poll())
+ self._background_task = asyncio.get_event_loop().create_task(
+ self._blocking_poll()
+ )
self._future.add_done_callback(fn)
def set_result(self, result):
diff --git a/google/api_core/future/base.py b/google/api_core/future/base.py
index e7888ca3..f3005860 100644
--- a/google/api_core/future/base.py
+++ b/google/api_core/future/base.py
@@ -16,11 +16,8 @@
import abc
-import six
-
-@six.add_metaclass(abc.ABCMeta)
-class Future(object):
+class Future(object, metaclass=abc.ABCMeta):
# pylint: disable=missing-docstring
# We inherit the interfaces here from concurrent.futures.
diff --git a/google/api_core/future/polling.py b/google/api_core/future/polling.py
index 6b4c687d..f1e2a188 100644
--- a/google/api_core/future/polling.py
+++ b/google/api_core/future/polling.py
@@ -18,7 +18,7 @@
import concurrent.futures
from google.api_core import exceptions
-from google.api_core import retry
+from google.api_core import retry as retries
from google.api_core.future import _helpers
from google.api_core.future import base
@@ -29,13 +29,37 @@ class _OperationNotComplete(Exception):
pass
-RETRY_PREDICATE = retry.if_exception_type(
+# DEPRECATED as it conflates RPC retry and polling concepts into one.
+# Use POLLING_PREDICATE instead to configure polling.
+RETRY_PREDICATE = retries.if_exception_type(
_OperationNotComplete,
exceptions.TooManyRequests,
exceptions.InternalServerError,
exceptions.BadGateway,
+ exceptions.ServiceUnavailable,
+)
+
+# DEPRECATED: use DEFAULT_POLLING to configure LRO polling logic. Construct
+# Retry object using its default values as a baseline for any custom retry logic
+# (not to be confused with polling logic).
+DEFAULT_RETRY = retries.Retry(predicate=RETRY_PREDICATE)
+
+# POLLING_PREDICATE is supposed to poll only on _OperationNotComplete.
+# Any RPC-specific errors (like ServiceUnavailable) will be handled
+# by retry logic (not to be confused with polling logic) which is triggered for
+# every polling RPC independently of polling logic but within its context.
+POLLING_PREDICATE = retries.if_exception_type(
+ _OperationNotComplete,
+)
+
+# Default polling configuration
+DEFAULT_POLLING = retries.Retry(
+ predicate=POLLING_PREDICATE,
+ initial=1.0, # seconds
+ maximum=20.0, # seconds
+ multiplier=1.5,
+ timeout=900, # seconds
)
-DEFAULT_RETRY = retry.Retry(predicate=RETRY_PREDICATE)
class PollingFuture(base.Future):
@@ -44,19 +68,29 @@ class PollingFuture(base.Future):
The :meth:`done` method should be implemented by subclasses. The polling
behavior will repeatedly call ``done`` until it returns True.
- .. note: Privacy here is intended to prevent the final class from
- overexposing, not to prevent subclasses from accessing methods.
+ The actual polling logic is encapsulated in :meth:`result` method. See
+ documentation for that method for details on how polling works.
+
+ .. note::
+
+ Privacy here is intended to prevent the final class from
+ overexposing, not to prevent subclasses from accessing methods.
Args:
- retry (google.api_core.retry.Retry): The retry configuration used
- when polling. This can be used to control how often :meth:`done`
- is polled. Regardless of the retry's ``deadline``, it will be
- overridden by the ``timeout`` argument to :meth:`result`.
+ polling (google.api_core.retry.Retry): The configuration used for polling.
+ This parameter controls how often :meth:`done` is polled. If the
+ ``timeout`` argument is specified in :meth:`result` method it will
+ override the ``polling.timeout`` property.
+ retry (google.api_core.retry.Retry): DEPRECATED use ``polling`` instead.
+ If set, it will override ``polling`` parameter for backward
+ compatibility.
"""
- def __init__(self, retry=DEFAULT_RETRY):
+ _DEFAULT_VALUE = object()
+
+ def __init__(self, polling=DEFAULT_POLLING, **kwargs):
super(PollingFuture, self).__init__()
- self._retry = retry
+ self._polling = kwargs.get("retry", polling)
self._result = None
self._exception = None
self._result_set = False
@@ -66,11 +100,13 @@ def __init__(self, retry=DEFAULT_RETRY):
self._done_callbacks = []
@abc.abstractmethod
- def done(self, retry=DEFAULT_RETRY):
+ def done(self, retry=None):
"""Checks to see if the operation is complete.
Args:
- retry (google.api_core.retry.Retry): (Optional) How to retry the RPC.
+ retry (google.api_core.retry.Retry): (Optional) How to retry the
+                polling RPC (not to be confused with the polling configuration;
+                see the documentation for :meth:`result` for details).
Returns:
bool: True if the operation is complete, False otherwise.
@@ -78,42 +114,136 @@ def done(self, retry=DEFAULT_RETRY):
# pylint: disable=redundant-returns-doc, missing-raises-doc
raise NotImplementedError()
- def _done_or_raise(self):
+ def _done_or_raise(self, retry=None):
"""Check if the future is done and raise if it's not."""
- if not self.done():
+ if not self.done(retry=retry):
raise _OperationNotComplete()
def running(self):
"""True if the operation is currently running."""
return not self.done()
- def _blocking_poll(self, timeout=None):
- """Poll and wait for the Future to be resolved.
+ def _blocking_poll(self, timeout=_DEFAULT_VALUE, retry=None, polling=None):
+ """Poll and wait for the Future to be resolved."""
- Args:
- timeout (int):
- How long (in seconds) to wait for the operation to complete.
- If None, wait indefinitely.
- """
if self._result_set:
return
- retry_ = self._retry.with_deadline(timeout)
+ polling = polling or self._polling
+ if timeout is not PollingFuture._DEFAULT_VALUE:
+ polling = polling.with_timeout(timeout)
try:
- retry_(self._done_or_raise)()
+ polling(self._done_or_raise)(retry=retry)
except exceptions.RetryError:
raise concurrent.futures.TimeoutError(
- "Operation did not complete within the designated " "timeout."
+ f"Operation did not complete within the designated timeout of "
+ f"{polling.timeout} seconds."
)
- def result(self, timeout=None):
- """Get the result of the operation, blocking if necessary.
+ def result(self, timeout=_DEFAULT_VALUE, retry=None, polling=None):
+ """Get the result of the operation.
+
+ This method will poll for operation status periodically, blocking if
+ necessary. If you just want to make sure that this method does not block
+ for more than X seconds and you do not care about the nitty-gritty of
+ how this method operates, just call it with ``result(timeout=X)``. The
+ other parameters are for advanced use only.
+
+ Every call to this method is controlled by the following three
+ parameters, each of which has a specific, distinct role, even though all three
+ may look very similar: ``timeout``, ``retry`` and ``polling``. In most
+ cases users do not need to specify any custom values for any of these
+ parameters and may simply rely on default ones instead.
+
+ If you choose to specify custom parameters, please make sure you've
+ read the documentation below carefully.
+
+        First, please check the :class:`google.api_core.retry.Retry`
+        class documentation for the proper definition of the timeout and
+        deadline terms and for the definitions of the three different types of
+        timeouts. This class operates in terms of Retry Timeout and Polling
+        Timeout. It does not allow customizing the RPC timeout and the user is
+        expected to rely on default behavior for it.
+
+ The roles of each argument of this method are as follows:
+
+ ``timeout`` (int): (Optional) The Polling Timeout as defined in
+ :class:`google.api_core.retry.Retry`. If the operation does not complete
+ within this timeout an exception will be thrown. This parameter affects
+ neither Retry Timeout nor RPC Timeout.
+
+ ``retry`` (google.api_core.retry.Retry): (Optional) How to retry the
+ polling RPC. The ``retry.timeout`` property of this parameter is the
+ Retry Timeout as defined in :class:`google.api_core.retry.Retry`.
+ This parameter defines ONLY how the polling RPC call is retried
+ (i.e. what to do if the RPC we used for polling returned an error). It
+ does NOT define how the polling is done (i.e. how frequently and for
+ how long to call the polling RPC); use the ``polling`` parameter for that.
+        If a polling RPC throws an error and retrying it fails, the whole
+ future fails with the corresponding exception. If you want to tune which
+ server response error codes are not fatal for operation polling, use this
+ parameter to control that (``retry.predicate`` in particular).
+
+        ``polling`` (google.api_core.retry.Retry): (Optional) How often and
+        for how long to call the polling RPC periodically (i.e. what to do if
+        a polling RPC returned successfully but its returned result indicates
+        that the long running operation is not completed yet, so we need to
+        check it again at some point in the future). This parameter does NOT
+        define how to retry each individual polling RPC in case of an error;
+        use the ``retry`` parameter for that. The ``polling.timeout`` of this
+        parameter is the Polling Timeout as defined in
+        :class:`google.api_core.retry.Retry`.
+
+ For each of the arguments, there are also default values in place, which
+ will be used if a user does not specify their own. The default values
+ for the three parameters are not to be confused with the default values
+ for the corresponding arguments in this method (those serve as "not set"
+ markers for the resolution logic).
+
+        If ``timeout`` is provided (i.e. ``timeout is not _DEFAULT_VALUE``; note
+ the ``None`` value means "infinite timeout"), it will be used to control
+ the actual Polling Timeout. Otherwise, the ``polling.timeout`` value
+ will be used instead (see below for how the ``polling`` config itself
+ gets resolved). In other words, this parameter effectively overrides
+        the ``polling.timeout`` value if specified. This is done to preserve
+ backward compatibility.
+
+ If ``retry`` is provided (i.e. ``retry is not None``) it will be used to
+ control retry behavior for the polling RPC and the ``retry.timeout``
+ will determine the Retry Timeout. If not provided, the
+ polling RPC will be called with whichever default retry config was
+ specified for the polling RPC at the moment of the construction of the
+ polling RPC's client. For example, if the polling RPC is
+ ``operations_client.get_operation()``, the ``retry`` parameter will be
+ controlling its retry behavior (not polling behavior) and, if not
+ specified, that specific method (``operations_client.get_operation()``)
+ will be retried according to the default retry config provided during
+        creation of the ``operations_client`` client instead. This argument exists
+ mainly for backward compatibility; users are very unlikely to ever need
+ to set this parameter explicitly.
+
+ If ``polling`` is provided (i.e. ``polling is not None``), it will be used
+ to control the overall polling behavior and ``polling.timeout`` will
+ control Polling Timeout unless it is overridden by ``timeout`` parameter
+        as described above. If not provided, the ``polling`` parameter specified
+ during construction of this future (the ``polling`` argument in the
+ constructor) will be used instead. Note: since the ``timeout`` argument may
+ override ``polling.timeout`` value, this parameter should be viewed as
+ coupled with the ``timeout`` parameter as described above.
Args:
- timeout (int):
- How long (in seconds) to wait for the operation to complete.
- If None, wait indefinitely.
+ timeout (int): (Optional) How long (in seconds) to wait for the
+ operation to complete. If None, wait indefinitely.
+ retry (google.api_core.retry.Retry): (Optional) How to retry the
+ polling RPC. This defines ONLY how the polling RPC call is
+ retried (i.e. what to do if the RPC we used for polling returned
+ an error). It does NOT define how the polling is done (i.e. how
+ frequently and for how long to call the polling RPC).
+ polling (google.api_core.retry.Retry): (Optional) How often and
+ for how long to call polling RPC periodically. This parameter
+ does NOT define how to retry each individual polling RPC call
+ (use the ``retry`` parameter for that).
Returns:
google.protobuf.Message: The Operation's result.
@@ -122,7 +252,8 @@ def result(self, timeout=None):
google.api_core.GoogleAPICallError: If the operation errors or if
the timeout is reached before the operation completes.
"""
- self._blocking_poll(timeout=timeout)
+
+ self._blocking_poll(timeout=timeout, retry=retry, polling=polling)
if self._exception is not None:
# pylint: disable=raising-bad-type
@@ -131,12 +262,18 @@ def result(self, timeout=None):
return self._result
- def exception(self, timeout=None):
+ def exception(self, timeout=_DEFAULT_VALUE):
"""Get the exception from the operation, blocking if necessary.
+ See the documentation for the :meth:`result` method for details on how
+ this method operates, as both ``result`` and this method rely on the
+ exact same polling logic. The only difference is that this method does
+ not accept ``retry`` and ``polling`` arguments but relies on the default ones
+ instead.
+
Args:
timeout (int): How long to wait for the operation to complete.
- If None, wait indefinitely.
+ If None, wait indefinitely.
Returns:
Optional[google.api_core.GoogleAPICallError]: The operation's
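To make the three knobs concrete, here is a sketch (not part of the patch) of a toy ``PollingFuture`` subclass that resolves a few seconds after creation; note how the ``timeout`` argument overrides the custom ``polling.timeout``:

.. code-block:: python

    import time

    from google.api_core import retry as retries
    from google.api_core.future import polling

    class TimerFuture(polling.PollingFuture):
        """Toy future that resolves roughly three seconds after creation."""

        def __init__(self, **kwargs):
            super().__init__(**kwargs)
            self._finish_at = time.monotonic() + 3

        def done(self, retry=None):
            if time.monotonic() >= self._finish_at and not self._result_set:
                self.set_result("payload")
            return self._result_set

        def cancel(self):
            return False  # this toy future cannot be cancelled

        def cancelled(self):
            return False

    custom_polling = retries.Retry(
        predicate=polling.POLLING_PREDICATE,
        initial=0.5,   # first wait between polls, seconds
        maximum=2.0,   # cap on the wait between polls
        multiplier=1.5,
        timeout=30,    # Polling Timeout
    )

    fut = TimerFuture(polling=custom_polling)
    print(fut.result(timeout=10))  # `timeout` overrides custom_polling.timeout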
diff --git a/google/api_core/gapic_v1/__init__.py b/google/api_core/gapic_v1/__init__.py
index ed95da13..e5b7ad35 100644
--- a/google/api_core/gapic_v1/__init__.py
+++ b/google/api_core/gapic_v1/__init__.py
@@ -12,17 +12,18 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import sys
-
from google.api_core.gapic_v1 import client_info
from google.api_core.gapic_v1 import config
+from google.api_core.gapic_v1 import config_async
from google.api_core.gapic_v1 import method
+from google.api_core.gapic_v1 import method_async
from google.api_core.gapic_v1 import routing_header
-__all__ = ["client_info", "config", "method", "routing_header"]
-
-if sys.version_info >= (3, 6):
- from google.api_core.gapic_v1 import config_async # noqa: F401
- from google.api_core.gapic_v1 import method_async # noqa: F401
- __all__.append("config_async")
- __all__.append("method_async")
+__all__ = [
+ "client_info",
+ "config",
+ "config_async",
+ "method",
+ "method_async",
+ "routing_header",
+]
diff --git a/google/api_core/gapic_v1/client_info.py b/google/api_core/gapic_v1/client_info.py
index bdc2ce44..4b3b5649 100644
--- a/google/api_core/gapic_v1/client_info.py
+++ b/google/api_core/gapic_v1/client_info.py
@@ -33,10 +33,10 @@ class ClientInfo(client_info.ClientInfo):
Args:
python_version (str): The Python interpreter version, for example,
- ``'2.7.13'``.
+ ``'3.9.6'``.
grpc_version (Optional[str]): The gRPC library version.
api_core_version (str): The google-api-core library version.
- gapic_version (Optional[str]): The sversion of gapic-generated client
+ gapic_version (Optional[str]): The version of gapic-generated client
library, if the library was generated by gapic.
client_library_version (Optional[str]): The version of the client
library, generally used if the client library was not generated
@@ -45,6 +45,9 @@ class ClientInfo(client_info.ClientInfo):
user_agent (Optional[str]): Prefix to the user agent header. This is
used to supply information such as application name or partner tool.
Recommended format: ``application-or-tool-ID/major.minor.version``.
+ rest_version (Optional[str]): A string with labeled versions of the
+ dependencies used for REST transport.
+ protobuf_runtime_version (Optional[str]): The protobuf runtime version.
"""
def to_grpc_metadata(self):
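A small sketch (values are illustrative, not from the patch) of constructing a ``ClientInfo`` with the newly documented fields and rendering it as gRPC metadata:

.. code-block:: python

    from google.api_core.gapic_v1.client_info import ClientInfo

    info = ClientInfo(
        user_agent="my-app/1.2.3",       # hypothetical application ID
        gapic_version="0.1.0",
        rest_version="requests/2.31.0",  # labeled REST dependency versions
    )
    # Returns ("x-goog-api-client", "<space-separated version tokens>").
    print(info.to_grpc_metadata())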
diff --git a/google/api_core/gapic_v1/config.py b/google/api_core/gapic_v1/config.py
index 2a56cf1b..36b50d9f 100644
--- a/google/api_core/gapic_v1/config.py
+++ b/google/api_core/gapic_v1/config.py
@@ -21,7 +21,6 @@
import collections
import grpc
-import six
from google.api_core import exceptions
from google.api_core import retry
@@ -34,6 +33,9 @@
def _exception_class_for_grpc_status_name(name):
"""Returns the Google API exception class for a gRPC error code name.
+ DEPRECATED: use ``exceptions.exception_class_for_grpc_status`` method
+ directly instead.
+
Args:
name (str): The name of the gRPC status code, for example,
``UNAVAILABLE``.
@@ -48,6 +50,8 @@ def _exception_class_for_grpc_status_name(name):
def _retry_from_retry_config(retry_params, retry_codes, retry_impl=retry.Retry):
"""Creates a Retry object given a gapic retry configuration.
+ DEPRECATED: instantiate retry and timeout classes directly instead.
+
Args:
retry_params (dict): The retry parameter values, for example::
@@ -82,6 +86,8 @@ def _retry_from_retry_config(retry_params, retry_codes, retry_impl=retry.Retry):
def _timeout_from_retry_config(retry_params):
"""Creates a ExponentialTimeout object given a gapic retry configuration.
+ DEPRECATED: instantiate retry and timeout classes directly instead.
+
Args:
retry_params (dict): The retry parameter values, for example::
@@ -114,6 +120,8 @@ def parse_method_configs(interface_config, retry_impl=retry.Retry):
"""Creates default retry and timeout objects for each method in a gapic
interface config.
+ DEPRECATED: instantiate retry and timeout classes directly instead.
+
Args:
interface_config (Mapping): The interface config section of the full
gapic library config. For example, If the full configuration has
@@ -130,30 +138,28 @@ def parse_method_configs(interface_config, retry_impl=retry.Retry):
# Grab all the retry codes
retry_codes_map = {
name: retry_codes
- for name, retry_codes in six.iteritems(interface_config.get("retry_codes", {}))
+ for name, retry_codes in interface_config.get("retry_codes", {}).items()
}
# Grab all of the retry params
retry_params_map = {
name: retry_params
- for name, retry_params in six.iteritems(
- interface_config.get("retry_params", {})
- )
+ for name, retry_params in interface_config.get("retry_params", {}).items()
}
# Iterate through all the API methods and create a flat MethodConfig
# instance for each one.
method_configs = {}
- for method_name, method_params in six.iteritems(
- interface_config.get("methods", {})
- ):
+ for method_name, method_params in interface_config.get("methods", {}).items():
retry_params_name = method_params.get("retry_params_name")
if retry_params_name is not None:
retry_params = retry_params_map[retry_params_name]
retry_ = _retry_from_retry_config(
- retry_params, retry_codes_map[method_params["retry_codes_name"]], retry_impl
+ retry_params,
+ retry_codes_map[method_params["retry_codes_name"]],
+ retry_impl,
)
timeout_ = _timeout_from_retry_config(retry_params)
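Since the helpers above are now marked deprecated, a minimal sketch of the suggested replacement, instantiating retry and timeout objects directly (all values illustrative only):

.. code-block:: python

    from google.api_core import exceptions
    from google.api_core import retry
    from google.api_core import timeout

    # Roughly what a parsed gapic retry config would have produced.
    my_retry = retry.Retry(
        predicate=retry.if_exception_type(
            exceptions.ServiceUnavailable,
            exceptions.DeadlineExceeded,
        ),
        initial=0.1,    # seconds
        maximum=60.0,   # seconds
        multiplier=1.3,
        timeout=600.0,  # overall retry budget, seconds
    )
    my_timeout = timeout.ExponentialTimeout(
        initial=20.0, maximum=600.0, multiplier=1.5, deadline=600.0
    )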
diff --git a/google/api_core/gapic_v1/config_async.py b/google/api_core/gapic_v1/config_async.py
index 00e5e240..13d6a480 100644
--- a/google/api_core/gapic_v1/config_async.py
+++ b/google/api_core/gapic_v1/config_async.py
@@ -38,5 +38,5 @@ def parse_method_configs(interface_config):
configuration.
"""
return config.parse_method_configs(
- interface_config,
- retry_impl=retry_async.AsyncRetry)
+ interface_config, retry_impl=retry_async.AsyncRetry
+ )
diff --git a/google/api_core/gapic_v1/method.py b/google/api_core/gapic_v1/method.py
index 8bf82569..0f14ea9c 100644
--- a/google/api_core/gapic_v1/method.py
+++ b/google/api_core/gapic_v1/method.py
@@ -15,17 +15,30 @@
"""Helpers for wrapping low-level gRPC methods with common functionality.
This is used by gapic clients to provide common error mapping, retry, timeout,
-pagination, and long-running operations to gRPC methods.
+compression, pagination, and long-running operations to gRPC methods.
"""
-from google.api_core import general_helpers
+import enum
+import functools
+
from google.api_core import grpc_helpers
-from google.api_core import timeout
from google.api_core.gapic_v1 import client_info
+from google.api_core.timeout import TimeToDeadlineTimeout
USE_DEFAULT_METADATA = object()
-DEFAULT = object()
-"""Sentinel value indicating that a retry or timeout argument was unspecified,
+
+
+class _MethodDefault(enum.Enum):
+ # Uses enum so that pytype/mypy knows that this is the only possible value.
+ # https://stackoverflow.com/a/60605919/101923
+ #
+ # Literal[_DEFAULT_VALUE] is an alternative, but only added in Python 3.8.
+ # https://docs.python.org/3/library/typing.html#typing.Literal
+ _DEFAULT_VALUE = object()
+
+
+DEFAULT = _MethodDefault._DEFAULT_VALUE
+"""Sentinel value indicating that a retry, timeout, or compression argument was unspecified,
so the default should be used."""
@@ -39,55 +52,14 @@ def _apply_decorators(func, decorators):
``decorators`` may contain items that are ``None`` or ``False`` which will
be ignored.
"""
- decorators = filter(_is_not_none_or_false, reversed(decorators))
+ filtered_decorators = filter(_is_not_none_or_false, reversed(decorators))
- for decorator in decorators:
+ for decorator in filtered_decorators:
func = decorator(func)
return func
-def _determine_timeout(default_timeout, specified_timeout, retry):
- """Determines how timeout should be applied to a wrapped method.
-
- Args:
- default_timeout (Optional[Timeout]): The default timeout specified
- at method creation time.
- specified_timeout (Optional[Timeout]): The timeout specified at
- invocation time. If :attr:`DEFAULT`, this will be set to
- the ``default_timeout``.
- retry (Optional[Retry]): The retry specified at invocation time.
-
- Returns:
- Optional[Timeout]: The timeout to apply to the method or ``None``.
- """
- # If timeout is specified as a number instead of a Timeout instance,
- # convert it to a ConstantTimeout.
- if isinstance(specified_timeout, (int, float)):
- specified_timeout = timeout.ConstantTimeout(specified_timeout)
- if isinstance(default_timeout, (int, float)):
- default_timeout = timeout.ConstantTimeout(default_timeout)
-
- if specified_timeout is DEFAULT:
- specified_timeout = default_timeout
-
- if specified_timeout is default_timeout:
- # If timeout is the default and the default timeout is exponential and
- # a non-default retry is specified, make sure the timeout's deadline
- # matches the retry's. This handles the case where the user leaves
- # the timeout default but specifies a lower deadline via the retry.
- if (
- retry
- and retry is not DEFAULT
- and isinstance(default_timeout, timeout.ExponentialTimeout)
- ):
- return default_timeout.with_deadline(retry._deadline)
- else:
- return default_timeout
-
- return specified_timeout
-
-
class _GapicCallable(object):
"""Callable that applies retry, timeout, and metadata logic.
@@ -95,41 +67,53 @@ class _GapicCallable(object):
target (Callable): The low-level RPC method.
retry (google.api_core.retry.Retry): The default retry for the
callable. If ``None``, this callable will not retry by default
- timeout (google.api_core.timeout.Timeout): The default timeout
- for the callable. If ``None``, this callable will not specify
- a timeout argument to the low-level RPC method by default.
+ timeout (google.api_core.timeout.Timeout): The default timeout for the
+ callable (i.e. duration of time within which an RPC must terminate
+ after its start, not to be confused with deadline). If ``None``,
+ this callable will not specify a timeout argument to the low-level
+ RPC method.
+ compression (grpc.Compression): The default compression for the callable.
+ If ``None``, this callable will not specify a compression argument
+ to the low-level RPC method.
metadata (Sequence[Tuple[str, str]]): Additional metadata that is
provided to the RPC method on every invocation. This is merged with
any metadata specified during invocation. If ``None``, no
additional metadata will be passed to the RPC method.
"""
- def __init__(self, target, retry, timeout, metadata=None):
+ def __init__(
+ self,
+ target,
+ retry,
+ timeout,
+ compression,
+ metadata=None,
+ ):
self._target = target
self._retry = retry
self._timeout = timeout
+ self._compression = compression
self._metadata = metadata
- def __call__(self, *args, **kwargs):
- """Invoke the low-level RPC with retry, timeout, and metadata."""
- # Note: Due to Python 2 lacking keyword-only arguments we use kwargs to
- # extract the retry and timeout params.
- timeout_ = _determine_timeout(
- self._timeout,
- kwargs.pop("timeout", self._timeout),
- # Use only the invocation-specified retry only for this, as we only
- # want to adjust the timeout deadline if the *user* specified
- # a different retry.
- kwargs.get("retry", None),
- )
-
- retry = kwargs.pop("retry", self._retry)
+ def __call__(
+ self, *args, timeout=DEFAULT, retry=DEFAULT, compression=DEFAULT, **kwargs
+ ):
+ """Invoke the low-level RPC with retry, timeout, compression, and metadata."""
if retry is DEFAULT:
retry = self._retry
+ if timeout is DEFAULT:
+ timeout = self._timeout
+
+ if compression is DEFAULT:
+ compression = self._compression
+
+ if isinstance(timeout, (int, float)):
+ timeout = TimeToDeadlineTimeout(timeout=timeout)
+
# Apply all applicable decorators.
- wrapped_func = _apply_decorators(self._target, [retry, timeout_])
+ wrapped_func = _apply_decorators(self._target, [retry, timeout])
# Add the user agent metadata to the call.
if self._metadata is not None:
@@ -141,6 +125,8 @@ def __call__(self, *args, **kwargs):
metadata = list(metadata)
metadata.extend(self._metadata)
kwargs["metadata"] = metadata
+        if compression is not None:
+ kwargs["compression"] = compression
return wrapped_func(*args, **kwargs)
@@ -149,12 +135,15 @@ def wrap_method(
func,
default_retry=None,
default_timeout=None,
+ default_compression=None,
client_info=client_info.DEFAULT_CLIENT_INFO,
+ *,
+ with_call=False,
):
"""Wrap an RPC method with common behavior.
- This applies common error wrapping, retry, and timeout behavior a function.
- The wrapped function will take optional ``retry`` and ``timeout``
+ This applies common error wrapping, retry, timeout, and compression behavior to a function.
+ The wrapped function will take optional ``retry``, ``timeout``, and ``compression``
arguments.
For example::
@@ -162,6 +151,7 @@ def wrap_method(
import google.api_core.gapic_v1.method
from google.api_core import retry
from google.api_core import timeout
+ from grpc import Compression
# The original RPC method.
def get_topic(name, timeout=None):
@@ -170,6 +160,7 @@ def get_topic(name, timeout=None):
default_retry = retry.Retry(deadline=60)
default_timeout = timeout.Timeout(deadline=60)
+ default_compression = Compression.NoCompression
wrapped_get_topic = google.api_core.gapic_v1.method.wrap_method(
get_topic, default_retry)
@@ -218,27 +209,45 @@ def get_topic(name, timeout=None):
default_timeout (Optional[google.api_core.Timeout]): The default
timeout strategy. Can also be specified as an int or float. If
``None``, the method will not have timeout specified by default.
+ default_compression (Optional[grpc.Compression]): The default
+ grpc.Compression. If ``None``, the method will not have
+ compression specified by default.
client_info
(Optional[google.api_core.gapic_v1.client_info.ClientInfo]):
Client information used to create a user-agent string that's
passed as gRPC metadata to the method. If unspecified, then
a sane default will be used. If ``None``, then no user agent
metadata will be provided to the RPC method.
+        with_call (bool): If True, wrapped grpc.UnaryUnaryMultiCallables will
+ return a tuple of (response, grpc.Call) instead of just the response.
+ This is useful for extracting trailing metadata from unary calls.
+ Defaults to False.
Returns:
- Callable: A new callable that takes optional ``retry`` and ``timeout``
- arguments and applies the common error mapping, retry, timeout,
+        Callable: A new callable that takes optional ``retry``, ``timeout``,
+        and ``compression`` arguments and applies the common error
+        mapping, retry, timeout, compression,
and metadata behavior to the low-level RPC method.
"""
+ if with_call:
+ try:
+ func = func.with_call
+ except AttributeError as exc:
+ raise ValueError(
+ "with_call=True is only supported for unary calls."
+ ) from exc
func = grpc_helpers.wrap_errors(func)
-
if client_info is not None:
user_agent_metadata = [client_info.to_grpc_metadata()]
else:
user_agent_metadata = None
- return general_helpers.wraps(func)(
+ return functools.wraps(func)(
_GapicCallable(
- func, default_retry, default_timeout, metadata=user_agent_metadata
+ func,
+ default_retry,
+ default_timeout,
+ default_compression,
+ metadata=user_agent_metadata,
)
)
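A hedged sketch of the new ``with_call`` flag together with a per-call override; ``channel`` and ``request`` are hypothetical stand-ins for an existing ``grpc.Channel`` and request message, not part of the patch:

.. code-block:: python

    from google.api_core.gapic_v1 import method

    # `channel` is an assumed pre-existing grpc.Channel; the method path
    # is made up for illustration.
    get_thing = channel.unary_unary("/my.package.Service/GetThing")
    wrapped = method.wrap_method(
        get_thing,
        default_timeout=30,  # ints become a TimeToDeadlineTimeout
        with_call=True,      # also return the grpc.Call for the RPC
    )

    response, call = wrapped(request, timeout=10)  # per-call override
    print(call.trailing_metadata())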
diff --git a/google/api_core/gapic_v1/method_async.py b/google/api_core/gapic_v1/method_async.py
index 5210b2b7..c0f38c0e 100644
--- a/google/api_core/gapic_v1/method_async.py
+++ b/google/api_core/gapic_v1/method_async.py
@@ -14,32 +14,46 @@
"""AsyncIO helpers for wrapping gRPC methods with common functionality.
This is used by gapic clients to provide common error mapping, retry, timeout,
-pagination, and long-running operations to gRPC methods.
+compression, pagination, and long-running operations to gRPC methods.
"""
-from google.api_core import general_helpers, grpc_helpers_async
+import functools
+
+from google.api_core import grpc_helpers_async
from google.api_core.gapic_v1 import client_info
-from google.api_core.gapic_v1.method import (_GapicCallable, # noqa: F401
- DEFAULT,
- USE_DEFAULT_METADATA)
+from google.api_core.gapic_v1.method import _GapicCallable
+from google.api_core.gapic_v1.method import DEFAULT # noqa: F401
+from google.api_core.gapic_v1.method import USE_DEFAULT_METADATA # noqa: F401
+
+_DEFAULT_ASYNC_TRANSPORT_KIND = "grpc_asyncio"
def wrap_method(
- func,
- default_retry=None,
- default_timeout=None,
- client_info=client_info.DEFAULT_CLIENT_INFO,
+ func,
+ default_retry=None,
+ default_timeout=None,
+ default_compression=None,
+ client_info=client_info.DEFAULT_CLIENT_INFO,
+ kind=_DEFAULT_ASYNC_TRANSPORT_KIND,
):
"""Wrap an async RPC method with common behavior.
Returns:
- Callable: A new callable that takes optional ``retry`` and ``timeout``
- arguments and applies the common error mapping, retry, timeout,
- and metadata behavior to the low-level RPC method.
+ Callable: A new callable that takes optional ``retry``, ``timeout``,
+ and ``compression`` arguments and applies the common error mapping,
+ retry, timeout, metadata, and compression behavior to the low-level RPC method.
"""
- func = grpc_helpers_async.wrap_errors(func)
+ if kind == _DEFAULT_ASYNC_TRANSPORT_KIND:
+ func = grpc_helpers_async.wrap_errors(func)
metadata = [client_info.to_grpc_metadata()] if client_info is not None else None
- return general_helpers.wraps(func)(_GapicCallable(
- func, default_retry, default_timeout, metadata=metadata))
+ return functools.wraps(func)(
+ _GapicCallable(
+ func,
+ default_retry,
+ default_timeout,
+ default_compression,
+ metadata=metadata,
+ )
+ )
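A sketch of the async wrapper with the new ``kind`` argument; ``fake_rpc`` is a stand-in coroutine, and any ``kind`` other than ``"grpc_asyncio"`` simply skips the gRPC error remapping:

.. code-block:: python

    import asyncio

    from google.api_core.gapic_v1 import method_async

    async def fake_rpc(name, timeout=None, metadata=None):
        # Stand-in for a transport-level coroutine.
        return {"name": name}

    # "rest" is an illustrative non-default kind, not a defined constant.
    wrapped = method_async.wrap_method(fake_rpc, default_timeout=5, kind="rest")
    print(asyncio.run(wrapped("topic-1")))  # -> {'name': 'topic-1'}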
diff --git a/google/api_core/gapic_v1/routing_header.py b/google/api_core/gapic_v1/routing_header.py
index 3fb12a6f..c0c6f648 100644
--- a/google/api_core/gapic_v1/routing_header.py
+++ b/google/api_core/gapic_v1/routing_header.py
@@ -20,43 +20,68 @@
Generally, these headers are specified as gRPC metadata.
"""
-import sys
-
-from six.moves.urllib.parse import urlencode
+import functools
+from enum import Enum
+from urllib.parse import urlencode
ROUTING_METADATA_KEY = "x-goog-request-params"
+# This is the value for the `maxsize` argument of @functools.lru_cache
+# https://docs.python.org/3/library/functools.html#functools.lru_cache
+# This represents the number of recent function calls to store.
+ROUTING_PARAM_CACHE_SIZE = 32
-def to_routing_header(params):
+def to_routing_header(params, qualified_enums=True):
"""Returns a routing header string for the given request parameters.
Args:
- params (Mapping[str, Any]): A dictionary containing the request
+ params (Mapping[str, str | bytes | Enum]): A dictionary containing the request
parameters used for routing.
+ qualified_enums (bool): Whether to represent enum values
+ as their type-qualified symbol names instead of as their
+ unqualified symbol names.
Returns:
str: The routing header string.
"""
- if sys.version_info[0] < 3:
- # Python 2 does not have the "safe" parameter for urlencode.
- return urlencode(params).replace("%2F", "/")
- return urlencode(
- params,
- # Per Google API policy (go/api-url-encoding), / is not encoded.
- safe="/",
- )
+ tuples = params.items() if isinstance(params, dict) else params
+ if not qualified_enums:
+ tuples = [(x[0], x[1].name) if isinstance(x[1], Enum) else x for x in tuples]
+ return "&".join([_urlencode_param(*t) for t in tuples])
-def to_grpc_metadata(params):
+def to_grpc_metadata(params, qualified_enums=True):
"""Returns the gRPC metadata containing the routing headers for the given
request parameters.
Args:
- params (Mapping[str, Any]): A dictionary containing the request
+ params (Mapping[str, str | bytes | Enum]): A dictionary containing the request
parameters used for routing.
+ qualified_enums (bool): Whether to represent enum values
+ as their type-qualified symbol names instead of as their
+ unqualified symbol names.
Returns:
Tuple(str, str): The gRPC metadata containing the routing header key
and value.
"""
- return (ROUTING_METADATA_KEY, to_routing_header(params))
+ return (ROUTING_METADATA_KEY, to_routing_header(params, qualified_enums))
+
+
+# use caching to avoid repeated computation
+@functools.lru_cache(maxsize=ROUTING_PARAM_CACHE_SIZE)
+def _urlencode_param(key, value):
+ """Cacheable wrapper over urlencode
+
+ Args:
+ key (str): The key of the parameter to encode.
+ value (str | bytes | Enum): The value of the parameter to encode.
+
+ Returns:
+ str: The encoded parameter.
+ """
+ return urlencode(
+ {key: value},
+ # Per Google API policy (go/api-url-encoding), / is not encoded.
+ safe="/",
+ )
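An illustrative run of the reworked helpers (the enum is a made-up example, not from the library):

.. code-block:: python

    from enum import Enum

    from google.api_core.gapic_v1 import routing_header

    class View(Enum):
        BASIC = 1

    params = [("table_name", "projects/p/instances/i"), ("view", View.BASIC)]

    # Unqualified enum symbol names:
    # 'table_name=projects/p/instances/i&view=BASIC'
    print(routing_header.to_routing_header(params, qualified_enums=False))

    # ('x-goog-request-params', 'name=projects/p') -- '/' stays unescaped
    print(routing_header.to_grpc_metadata({"name": "projects/p"}))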
diff --git a/google/api_core/general_helpers.py b/google/api_core/general_helpers.py
index d2d0c440..a6af45b7 100644
--- a/google/api_core/general_helpers.py
+++ b/google/api_core/general_helpers.py
@@ -12,22 +12,5 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-"""Helpers for general Python functionality."""
-
-import functools
-
-import six
-
-
-# functools.partial objects lack several attributes present on real function
-# objects. In Python 2 wraps fails on this so use a restricted set instead.
-_PARTIAL_VALID_ASSIGNMENTS = ("__doc__",)
-
-
-def wraps(wrapped):
- """A functools.wraps helper that handles partial objects on Python 2."""
- # https://github.com/google/pytype/issues/322
- if isinstance(wrapped, functools.partial): # pytype: disable=wrong-arg-types
- return six.wraps(wrapped, assigned=_PARTIAL_VALID_ASSIGNMENTS)
- else:
- return six.wraps(wrapped)
+# This import for backward compatibility only.
+from functools import wraps # noqa: F401 pragma: NO COVER
diff --git a/google/api_core/grpc_helpers.py b/google/api_core/grpc_helpers.py
index dfc8442d..07963024 100644
--- a/google/api_core/grpc_helpers.py
+++ b/google/api_core/grpc_helpers.py
@@ -13,29 +13,48 @@
# limitations under the License.
"""Helpers for :mod:`grpc`."""
+from typing import Generic, Iterator, Optional, TypeVar
import collections
+import functools
+import warnings
import grpc
-import six
from google.api_core import exceptions
-from google.api_core import general_helpers
import google.auth
import google.auth.credentials
import google.auth.transport.grpc
import google.auth.transport.requests
+import google.protobuf
-try:
- import grpc_gcp
+PROTOBUF_VERSION = google.protobuf.__version__
- HAS_GRPC_GCP = True
-except ImportError:
+# The grpcio-gcp package only has support for protobuf < 4
+if PROTOBUF_VERSION[0:2] == "3.": # pragma: NO COVER
+ try:
+ import grpc_gcp
+
+ warnings.warn(
+ """Support for grpcio-gcp is deprecated. This feature will be
+ removed from `google-api-core` after January 1, 2024. If you need to
+ continue to use this feature, please pin to a specific version of
+ `google-api-core`.""",
+ DeprecationWarning,
+ )
+ HAS_GRPC_GCP = True
+ except ImportError:
+ HAS_GRPC_GCP = False
+else:
HAS_GRPC_GCP = False
+
# The list of gRPC Callable interfaces that return iterators.
_STREAM_WRAP_CLASSES = (grpc.UnaryStreamMultiCallable, grpc.StreamStreamMultiCallable)
+# denotes the proto response type for grpc calls
+P = TypeVar("P")
+
def _patch_callable_name(callable_):
"""Fix-up gRPC callable attributes.
@@ -51,17 +70,17 @@ def _wrap_unary_errors(callable_):
"""Map errors for Unary-Unary and Stream-Unary gRPC callables."""
_patch_callable_name(callable_)
- @six.wraps(callable_)
+ @functools.wraps(callable_)
def error_remapped_callable(*args, **kwargs):
try:
return callable_(*args, **kwargs)
except grpc.RpcError as exc:
- six.raise_from(exceptions.from_grpc_error(exc), exc)
+ raise exceptions.from_grpc_error(exc) from exc
return error_remapped_callable
-class _StreamingResponseIterator(grpc.Call):
+class _StreamingResponseIterator(Generic[P], grpc.Call):
def __init__(self, wrapped, prefetch_first_result=True):
self._wrapped = wrapped
@@ -70,7 +89,7 @@ def __init__(self, wrapped, prefetch_first_result=True):
# to retrieve the first result, in order to fail, in order to trigger a retry.
try:
if prefetch_first_result:
- self._stored_first_result = six.next(self._wrapped)
+ self._stored_first_result = next(self._wrapped)
except TypeError:
# It is possible the wrapped method isn't an iterable (a grpc.Call
# for instance). If this happens don't store the first result.
@@ -79,11 +98,11 @@ def __init__(self, wrapped, prefetch_first_result=True):
# ignore stop iteration at this time. This should be handled outside of retry.
pass
- def __iter__(self):
+ def __iter__(self) -> Iterator[P]:
"""This iterator is also an iterable that returns itself."""
return self
- def next(self):
+ def __next__(self) -> P:
"""Get the next response from the stream.
Returns:
@@ -94,13 +113,10 @@ def next(self):
result = self._stored_first_result
del self._stored_first_result
return result
- return six.next(self._wrapped)
+ return next(self._wrapped)
except grpc.RpcError as exc:
# If the stream has already returned data, we cannot recover here.
- six.raise_from(exceptions.from_grpc_error(exc), exc)
-
- # Alias needed for Python 2/3 support.
- __next__ = next
+ raise exceptions.from_grpc_error(exc) from exc
# grpc.Call & grpc.RpcContext interface
@@ -129,6 +145,10 @@ def trailing_metadata(self):
return self._wrapped.trailing_metadata()
+# public type alias denoting the return type of streaming gapic calls
+GrpcStream = _StreamingResponseIterator[P]
+
+
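A hedged sketch of how the alias can appear in annotations; `MyProto` is a placeholder for a generated protobuf message type:

from google.api_core.grpc_helpers import GrpcStream

def consume(stream: GrpcStream["MyProto"]) -> None:
    for message in stream:  # __next__ above remaps grpc.RpcError
        print(message)
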
def _wrap_stream_errors(callable_):
"""Wrap errors for Unary-Stream and Stream-Stream gRPC callables.
@@ -138,7 +158,7 @@ def _wrap_stream_errors(callable_):
"""
_patch_callable_name(callable_)
- @general_helpers.wraps(callable_)
+ @functools.wraps(callable_)
def error_remapped_callable(*args, **kwargs):
try:
result = callable_(*args, **kwargs)
@@ -147,9 +167,11 @@ def error_remapped_callable(*args, **kwargs):
# hidden flag to see if pre-fetching is disabled.
# https://github.com/googleapis/python-pubsub/issues/93#issuecomment-630762257
prefetch_first = getattr(callable_, "_prefetch_first_result_", True)
- return _StreamingResponseIterator(result, prefetch_first_result=prefetch_first)
+ return _StreamingResponseIterator(
+ result, prefetch_first_result=prefetch_first
+ )
except grpc.RpcError as exc:
- six.raise_from(exceptions.from_grpc_error(exc), exc)
+ raise exceptions.from_grpc_error(exc) from exc
return error_remapped_callable
@@ -177,11 +199,14 @@ def wrap_errors(callable_):
def _create_composite_credentials(
- credentials=None,
- credentials_file=None,
- scopes=None,
- ssl_credentials=None,
- quota_project_id=None):
+ credentials=None,
+ credentials_file=None,
+ default_scopes=None,
+ scopes=None,
+ ssl_credentials=None,
+ quota_project_id=None,
+ default_host=None,
+):
"""Create the composite credentials for secure channels.
Args:
@@ -191,12 +216,28 @@ def _create_composite_credentials(
credentials_file (str): A file with credentials that can be loaded with
:func:`google.auth.load_credentials_from_file`. This argument is
mutually exclusive with credentials.
+
+ .. warning::
+ Important: If you accept a credential configuration (credential JSON/File/Stream)
+ from an external source for authentication to Google Cloud Platform, you must
+ validate it before providing it to any Google API or client library. Providing an
+ unvalidated credential configuration to Google APIs or libraries can compromise
+ the security of your systems and data. For more information, refer to
+ `Validate credential configurations from external sources`_.
+
+ .. _Validate credential configurations from external sources:
+
+ https://cloud.google.com/docs/authentication/external/externally-sourced-credentials
+ default_scopes (Sequence[str]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
scopes (Sequence[str]): A optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
ssl_credentials (grpc.ChannelCredentials): Optional SSL channel
credentials. This can be used to specify different certificates.
quota_project_id (str): An optional project to use for billing and quota.
+ default_host (str): The default endpoint, e.g. "pubsub.googleapis.com".
Returns:
grpc.ChannelCredentials: The composed channel credentials object.
@@ -210,42 +251,68 @@ def _create_composite_credentials(
)
if credentials_file:
- credentials, _ = google.auth.load_credentials_from_file(credentials_file, scopes=scopes)
+ credentials, _ = google.auth.load_credentials_from_file(
+ credentials_file, scopes=scopes, default_scopes=default_scopes
+ )
elif credentials:
- credentials = google.auth.credentials.with_scopes_if_required(credentials, scopes)
+ credentials = google.auth.credentials.with_scopes_if_required(
+ credentials, scopes=scopes, default_scopes=default_scopes
+ )
else:
- credentials, _ = google.auth.default(scopes=scopes)
+ credentials, _ = google.auth.default(
+ scopes=scopes, default_scopes=default_scopes
+ )
- if quota_project_id:
+ if quota_project_id and isinstance(
+ credentials, google.auth.credentials.CredentialsWithQuotaProject
+ ):
credentials = credentials.with_quota_project(quota_project_id)
request = google.auth.transport.requests.Request()
# Create the metadata plugin for inserting the authorization header.
metadata_plugin = google.auth.transport.grpc.AuthMetadataPlugin(
- credentials, request
+ credentials,
+ request,
+ default_host=default_host,
)
# Create a set of grpc.CallCredentials using the metadata plugin.
google_auth_credentials = grpc.metadata_call_credentials(metadata_plugin)
- if ssl_credentials is None:
- ssl_credentials = grpc.ssl_channel_credentials()
-
- # Combine the ssl credentials and the authorization credentials.
- return grpc.composite_channel_credentials(
- ssl_credentials, google_auth_credentials
- )
+ # if `ssl_credentials` is set, use `grpc.composite_channel_credentials` instead of
+ # `grpc.compute_engine_channel_credentials` as the former supports passing
+ # `ssl_credentials` via `channel_credentials` which is needed for mTLS.
+ if ssl_credentials:
+ # Combine the ssl credentials and the authorization credentials.
+ # See https://grpc.github.io/grpc/python/grpc.html#grpc.composite_channel_credentials
+ return grpc.composite_channel_credentials(
+ ssl_credentials, google_auth_credentials
+ )
+ else:
+ # Use grpc.compute_engine_channel_credentials in order to support Direct Path.
+ # See https://grpc.github.io/grpc/python/grpc.html#grpc.compute_engine_channel_credentials
+ # TODO(https://github.com/googleapis/python-api-core/issues/598):
+ # Although `grpc.compute_engine_channel_credentials` returns channel credentials
+ # outside of a Google Compute Engine environment (GCE), we should determine if
+ # there is a way to reliably detect a GCE environment so that
+ # `grpc.compute_engine_channel_credentials` is not called outside of GCE.
+ return grpc.compute_engine_channel_credentials(google_auth_credentials)
def create_channel(
- target,
- credentials=None,
- scopes=None,
- ssl_credentials=None,
- credentials_file=None,
- quota_project_id=None,
- **kwargs):
+ target,
+ credentials=None,
+ scopes=None,
+ ssl_credentials=None,
+ credentials_file=None,
+ quota_project_id=None,
+ default_scopes=None,
+ default_host=None,
+ compression=None,
+ attempt_direct_path: Optional[bool] = False,
+ **kwargs,
+):
"""Create a secure channel with credentials.
Args:
@@ -261,35 +328,122 @@ def create_channel(
credentials_file (str): A file with credentials that can be loaded with
:func:`google.auth.load_credentials_from_file`. This argument is
mutually exclusive with credentials.
+
+ .. warning::
+ Important: If you accept a credential configuration (credential JSON/File/Stream)
+ from an external source for authentication to Google Cloud Platform, you must
+ validate it before providing it to any Google API or client library. Providing an
+ unvalidated credential configuration to Google APIs or libraries can compromise
+ the security of your systems and data. For more information, refer to
+ `Validate credential configurations from external sources`_.
+
+ .. _Validate credential configurations from external sources:
+
+ https://cloud.google.com/docs/authentication/external/externally-sourced-credentials
quota_project_id (str): An optional project to use for billing and quota.
+ default_scopes (Sequence[str]): Default scopes passed by a Google client
+ library. Use 'scopes' for user-defined scopes.
+ default_host (str): The default endpoint, e.g. "pubsub.googleapis.com".
+ compression (grpc.Compression): An optional value indicating the
+ compression method to be used over the lifetime of the channel.
+ attempt_direct_path (Optional[bool]): If set, Direct Path will be attempted
+ when the request is made. Direct Path is only available within a Google
+ Compute Engine (GCE) environment and provides a proxyless connection
+ which increases the available throughput, reduces latency, and increases
+ reliability. Note:
+
+ - This argument should only be set in a GCE environment and for services
+ that are known to support Direct Path.
+ - If this argument is set outside of GCE, the request will fail unless
+ the back-end service is configured to fall back to DNS.
+ - If the request causes a `ServiceUnavailable` response, it is recommended
+ that the client retry the request with `attempt_direct_path` set to
+ `False`, as the service may not support Direct Path.
+ - Using `ssl_credentials` with `attempt_direct_path` set to `True` will
+ result in `ValueError` as this combination is not yet supported.
+
kwargs: Additional key-word args passed to
:func:`grpc_gcp.secure_channel` or :func:`grpc.secure_channel`.
+ Note: `grpc_gcp` is only supported in environments with protobuf < 4.0.0.
Returns:
grpc.Channel: The created channel.
Raises:
google.api_core.DuplicateCredentialArgs: If both a credentials object and credentials_file are passed.
+ ValueError: If `ssl_credentials` is set and `attempt_direct_path` is set to `True`.
"""
+ # If `ssl_credentials` is set and `attempt_direct_path` is set to `True`,
+ # raise ValueError as this is not yet supported.
+ # See https://github.com/googleapis/python-api-core/issues/590
+ if ssl_credentials and attempt_direct_path:
+ raise ValueError("Using ssl_credentials with Direct Path is not supported")
+
composite_credentials = _create_composite_credentials(
credentials=credentials,
credentials_file=credentials_file,
+ default_scopes=default_scopes,
scopes=scopes,
ssl_credentials=ssl_credentials,
quota_project_id=quota_project_id,
+ default_host=default_host,
)
- if HAS_GRPC_GCP:
- # If grpc_gcp module is available use grpc_gcp.secure_channel,
- # otherwise, use grpc.secure_channel to create grpc channel.
+ # Note that grpcio-gcp is deprecated
+ if HAS_GRPC_GCP: # pragma: NO COVER
+ if compression is not None and compression != grpc.Compression.NoCompression:
+ warnings.warn(
+ "The `compression` argument is ignored for grpc_gcp.secure_channel creation.",
+ DeprecationWarning,
+ )
+ if attempt_direct_path:
+ warnings.warn(
+ """The `attempt_direct_path` argument is ignored for grpc_gcp.secure_channel creation.""",
+ DeprecationWarning,
+ )
return grpc_gcp.secure_channel(target, composite_credentials, **kwargs)
- else:
- return grpc.secure_channel(target, composite_credentials, **kwargs)
+
+ if attempt_direct_path:
+ target = _modify_target_for_direct_path(target)
+
+ return grpc.secure_channel(
+ target, composite_credentials, compression=compression, **kwargs
+ )
+
+
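A hedged usage sketch of the expanded `create_channel` surface; the endpoint and scope are illustrative, and ambient Google credentials are assumed at runtime:

import grpc
from google.api_core import grpc_helpers

channel = grpc_helpers.create_channel(
    "pubsub.googleapis.com:443",
    default_scopes=["https://www.googleapis.com/auth/cloud-platform"],
    default_host="pubsub.googleapis.com",
    compression=grpc.Compression.Gzip,
    attempt_direct_path=False,  # only set True inside GCE, for supported services
)
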
+def _modify_target_for_direct_path(target: str) -> str:
+ """
+ Given a target, return a modified version which is compatible with Direct Path.
+
+ Args:
+ target (str): The target service address in the format 'hostname[:port]' or
+ 'dns:///hostname[:port]'.
+
+ Returns:
+ target (str): The target service address which is converted into a format compatible with Direct Path.
+ If the target contains `dns:///` or does not contain `:///`, the target will be converted in
+ a format compatible with Direct Path; otherwise the original target will be returned as the
+ original target may already denote Direct Path.
+ """
+
+ # A DNS prefix may be included with the target to indicate the endpoint lives on the public Internet,
+ # outside of Google Cloud Platform.
+ dns_prefix = "dns:///"
+ # Remove "dns:///" if `attempt_direct_path` is set to True as
+ # the Direct Path prefix `google-c2p:///` will be used instead.
+ target = target.replace(dns_prefix, "")
+
+ direct_path_separator = ":///"
+ if direct_path_separator not in target:
+ target_without_port = target.split(":")[0]
+ # Modify the target to use Direct Path by adding the `google-c2p:///` prefix
+ target = f"google-c2p{direct_path_separator}{target_without_port}"
+ return target
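Illustrative expectations for this helper (the endpoint names are examples):

from google.api_core.grpc_helpers import _modify_target_for_direct_path

# the port and any "dns:///" prefix are dropped; "google-c2p:///" is added
assert _modify_target_for_direct_path("pubsub.googleapis.com:443") == "google-c2p:///pubsub.googleapis.com"
assert _modify_target_for_direct_path("dns:///pubsub.googleapis.com") == "google-c2p:///pubsub.googleapis.com"
# a target that already carries a scheme separator is returned unchanged
assert _modify_target_for_direct_path("google-c2p:///pubsub.googleapis.com") == "google-c2p:///pubsub.googleapis.com"
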
_MethodCall = collections.namedtuple(
- "_MethodCall", ("request", "timeout", "metadata", "credentials")
+ "_MethodCall", ("request", "timeout", "metadata", "credentials", "compression")
)
_ChannelRequest = collections.namedtuple("_ChannelRequest", ("method", "request"))
@@ -316,11 +470,15 @@ def __init__(self, method, channel):
"""List[protobuf.Message]: All requests sent to this callable."""
self.calls = []
"""List[Tuple]: All invocations of this callable. Each tuple is the
- request, timeout, metadata, and credentials."""
+ request, timeout, metadata, credentials, and compression."""
- def __call__(self, request, timeout=None, metadata=None, credentials=None):
+ def __call__(
+ self, request, timeout=None, metadata=None, credentials=None, compression=None
+ ):
self._channel.requests.append(_ChannelRequest(self._method, request))
- self.calls.append(_MethodCall(request, timeout, metadata, credentials))
+ self.calls.append(
+ _MethodCall(request, timeout, metadata, credentials, compression)
+ )
self.requests.append(request)
response = self.response
@@ -435,20 +593,42 @@ def __getattr__(self, key):
except KeyError:
raise AttributeError
- def unary_unary(self, method, request_serializer=None, response_deserializer=None):
+ def unary_unary(
+ self,
+ method,
+ request_serializer=None,
+ response_deserializer=None,
+ _registered_method=False,
+ ):
"""grpc.Channel.unary_unary implementation."""
return self._stub_for_method(method)
- def unary_stream(self, method, request_serializer=None, response_deserializer=None):
+ def unary_stream(
+ self,
+ method,
+ request_serializer=None,
+ response_deserializer=None,
+ _registered_method=False,
+ ):
"""grpc.Channel.unary_stream implementation."""
return self._stub_for_method(method)
- def stream_unary(self, method, request_serializer=None, response_deserializer=None):
+ def stream_unary(
+ self,
+ method,
+ request_serializer=None,
+ response_deserializer=None,
+ _registered_method=False,
+ ):
"""grpc.Channel.stream_unary implementation."""
return self._stub_for_method(method)
def stream_stream(
- self, method, request_serializer=None, response_deserializer=None
+ self,
+ method,
+ request_serializer=None,
+ response_deserializer=None,
+ _registered_method=False,
):
"""grpc.Channel.stream_stream implementation."""
return self._stub_for_method(method)
diff --git a/google/api_core/grpc_helpers_async.py b/google/api_core/grpc_helpers_async.py
index 9a994e9f..af661430 100644
--- a/google/api_core/grpc_helpers_async.py
+++ b/google/api_core/grpc_helpers_async.py
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-"""AsyncIO helpers for :mod:`grpc` supporting 3.6+.
+"""AsyncIO helpers for :mod:`grpc` supporting 3.7+.
Refer to the more detailed docstrings in grpc_helpers.py when using the following
functions. This module implements the same surface with AsyncIO semantics.
@@ -21,14 +21,15 @@
import asyncio
import functools
+from typing import AsyncGenerator, Generic, Iterator, Optional, TypeVar
+
import grpc
-from grpc.experimental import aio
+from grpc import aio
from google.api_core import exceptions, grpc_helpers
-
-# TODO(lidiz) Support gRPC GCP wrapper
-HAS_GRPC_GCP = False
+# denotes the proto response type for grpc calls
+P = TypeVar("P")
# NOTE(lidiz) Alternatively, we can hack "__getattribute__" to perform
# automatic patching for us. But that means the overhead of creating an
@@ -36,7 +37,6 @@
class _WrappedCall(aio.Call):
-
def __init__(self):
self._call = None
@@ -79,9 +79,8 @@ async def wait_for_connection(self):
raise exceptions.from_grpc_error(rpc_error) from rpc_error
-class _WrappedUnaryResponseMixin(_WrappedCall):
-
- def __await__(self):
+class _WrappedUnaryResponseMixin(Generic[P], _WrappedCall):
+ def __await__(self) -> Iterator[P]:
try:
response = yield from self._call.__await__()
return response
@@ -89,18 +88,17 @@ def __await__(self):
raise exceptions.from_grpc_error(rpc_error) from rpc_error
-class _WrappedStreamResponseMixin(_WrappedCall):
-
+class _WrappedStreamResponseMixin(Generic[P], _WrappedCall):
def __init__(self):
self._wrapped_async_generator = None
- async def read(self):
+ async def read(self) -> P:
try:
return await self._call.read()
except grpc.RpcError as rpc_error:
raise exceptions.from_grpc_error(rpc_error) from rpc_error
- async def _wrapped_aiter(self):
+ async def _wrapped_aiter(self) -> AsyncGenerator[P, None]:
try:
# NOTE(lidiz) coverage doesn't understand the exception raised from
# __anext__ method. It is covered by test case:
@@ -110,14 +108,13 @@ async def _wrapped_aiter(self):
except grpc.RpcError as rpc_error:
raise exceptions.from_grpc_error(rpc_error) from rpc_error
- def __aiter__(self):
+ def __aiter__(self) -> AsyncGenerator[P, None]:
if not self._wrapped_async_generator:
self._wrapped_async_generator = self._wrapped_aiter()
return self._wrapped_async_generator
class _WrappedStreamRequestMixin(_WrappedCall):
-
async def write(self, request):
try:
await self._call.write(request)
@@ -134,25 +131,34 @@ async def done_writing(self):
# NOTE(lidiz) Implementing each individual class separately, so we don't
# expose any API that should not be seen. E.g., __aiter__ in unary-unary
# RPC, or __await__ in stream-stream RPC.
-class _WrappedUnaryUnaryCall(_WrappedUnaryResponseMixin, aio.UnaryUnaryCall):
+class _WrappedUnaryUnaryCall(_WrappedUnaryResponseMixin[P], aio.UnaryUnaryCall):
"""Wrapped UnaryUnaryCall to map exceptions."""
-class _WrappedUnaryStreamCall(_WrappedStreamResponseMixin, aio.UnaryStreamCall):
+class _WrappedUnaryStreamCall(_WrappedStreamResponseMixin[P], aio.UnaryStreamCall):
"""Wrapped UnaryStreamCall to map exceptions."""
-class _WrappedStreamUnaryCall(_WrappedUnaryResponseMixin, _WrappedStreamRequestMixin, aio.StreamUnaryCall):
+class _WrappedStreamUnaryCall(
+ _WrappedUnaryResponseMixin[P], _WrappedStreamRequestMixin, aio.StreamUnaryCall
+):
"""Wrapped StreamUnaryCall to map exceptions."""
-class _WrappedStreamStreamCall(_WrappedStreamRequestMixin, _WrappedStreamResponseMixin, aio.StreamStreamCall):
+class _WrappedStreamStreamCall(
+ _WrappedStreamRequestMixin, _WrappedStreamResponseMixin[P], aio.StreamStreamCall
+):
"""Wrapped StreamStreamCall to map exceptions."""
+# public type alias denoting the return type of async streaming gapic calls
+GrpcAsyncStream = _WrappedStreamResponseMixin[P]
+# public type alias denoting the return type of unary gapic calls
+AwaitableGrpcCall = _WrappedUnaryResponseMixin[P]
+
+
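A hedged sketch of the async alias in annotations; `MyProto` is a placeholder for a generated protobuf message type:

from google.api_core.grpc_helpers_async import GrpcAsyncStream

async def consume(stream: GrpcAsyncStream["MyProto"]) -> None:
    async for message in stream:  # errors are remapped in _wrapped_aiter
        print(message)
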
def _wrap_unary_errors(callable_):
"""Map errors for Unary-Unary async callables."""
- grpc_helpers._patch_callable_name(callable_)
@functools.wraps(callable_)
def error_remapped_callable(*args, **kwargs):
@@ -162,23 +168,13 @@ def error_remapped_callable(*args, **kwargs):
return error_remapped_callable
-def _wrap_stream_errors(callable_):
+def _wrap_stream_errors(callable_, wrapper_type):
"""Map errors for streaming RPC async callables."""
- grpc_helpers._patch_callable_name(callable_)
@functools.wraps(callable_)
async def error_remapped_callable(*args, **kwargs):
call = callable_(*args, **kwargs)
-
- if isinstance(call, aio.UnaryStreamCall):
- call = _WrappedUnaryStreamCall().with_call(call)
- elif isinstance(call, aio.StreamUnaryCall):
- call = _WrappedStreamUnaryCall().with_call(call)
- elif isinstance(call, aio.StreamStreamCall):
- call = _WrappedStreamStreamCall().with_call(call)
- else:
- raise TypeError('Unexpected type of call %s' % type(call))
-
+ call = wrapper_type().with_call(call)
await call.wait_for_connection()
return call
@@ -200,20 +196,31 @@ def wrap_errors(callable_):
Returns: Callable: The wrapped gRPC callable.
"""
- if isinstance(callable_, aio.UnaryUnaryMultiCallable):
- return _wrap_unary_errors(callable_)
+ grpc_helpers._patch_callable_name(callable_)
+
+ if isinstance(callable_, aio.UnaryStreamMultiCallable):
+ return _wrap_stream_errors(callable_, _WrappedUnaryStreamCall)
+ elif isinstance(callable_, aio.StreamUnaryMultiCallable):
+ return _wrap_stream_errors(callable_, _WrappedStreamUnaryCall)
+ elif isinstance(callable_, aio.StreamStreamMultiCallable):
+ return _wrap_stream_errors(callable_, _WrappedStreamStreamCall)
else:
- return _wrap_stream_errors(callable_)
+ return _wrap_unary_errors(callable_)
def create_channel(
- target,
- credentials=None,
- scopes=None,
- ssl_credentials=None,
- credentials_file=None,
- quota_project_id=None,
- **kwargs):
+ target,
+ credentials=None,
+ scopes=None,
+ ssl_credentials=None,
+ credentials_file=None,
+ quota_project_id=None,
+ default_scopes=None,
+ default_host=None,
+ compression=None,
+ attempt_direct_path: Optional[bool] = False,
+ **kwargs
+):
"""Create an AsyncIO secure channel with credentials.
Args:
@@ -229,7 +236,40 @@ def create_channel(
credentials_file (str): A file with credentials that can be loaded with
:func:`google.auth.load_credentials_from_file`. This argument is
mutually exclusive with credentials.
+
+ .. warning::
+ Important: If you accept a credential configuration (credential JSON/File/Stream)
+ from an external source for authentication to Google Cloud Platform, you must
+ validate it before providing it to any Google API or client library. Providing an
+ unvalidated credential configuration to Google APIs or libraries can compromise
+ the security of your systems and data. For more information, refer to
+ `Validate credential configurations from external sources`_.
+
+ .. _Validate credential configurations from external sources:
+
+ https://cloud.google.com/docs/authentication/external/externally-sourced-credentials
quota_project_id (str): An optional project to use for billing and quota.
+ default_scopes (Sequence[str]): Default scopes passed by a Google client
+ library. Use 'scopes' for user-defined scopes.
+ default_host (str): The default endpoint, e.g. "pubsub.googleapis.com".
+ compression (grpc.Compression): An optional value indicating the
+ compression method to be used over the lifetime of the channel.
+ attempt_direct_path (Optional[bool]): If set, Direct Path will be attempted
+ when the request is made. Direct Path is only available within a Google
+ Compute Engine (GCE) environment and provides a proxyless connection
+ which increases the available throughput, reduces latency, and increases
+ reliability. Note:
+
+ - This argument should only be set in a GCE environment and for services
+ that are known to support Direct Path.
+ - If this argument is set outside of GCE, the request will fail unless
+ the back-end service is configured to fall back to DNS.
+ - If the request causes a `ServiceUnavailable` response, it is recommended
+ that the client retry the request with `attempt_direct_path` set to
+ `False`, as the service may not support Direct Path.
+ - Using `ssl_credentials` with `attempt_direct_path` set to `True` will
+ result in `ValueError` as this combination is not yet supported.
+
kwargs: Additional key-word args passed to :func:`aio.secure_channel`.
Returns:
@@ -237,17 +277,31 @@ def create_channel(
Raises:
google.api_core.DuplicateCredentialArgs: If both a credentials object and credentials_file are passed.
+ ValueError: If `ssl_credentials` is set and `attempt_direct_path` is set to `True`.
"""
+ # If `ssl_credentials` is set and `attempt_direct_path` is set to `True`,
+ # raise ValueError as this is not yet supported.
+ # See https://github.com/googleapis/python-api-core/issues/590
+ if ssl_credentials and attempt_direct_path:
+ raise ValueError("Using ssl_credentials with Direct Path is not supported")
+
composite_credentials = grpc_helpers._create_composite_credentials(
credentials=credentials,
credentials_file=credentials_file,
scopes=scopes,
+ default_scopes=default_scopes,
ssl_credentials=ssl_credentials,
quota_project_id=quota_project_id,
+ default_host=default_host,
)
- return aio.secure_channel(target, composite_credentials, **kwargs)
+ if attempt_direct_path:
+ target = grpc_helpers._modify_target_for_direct_path(target)
+
+ return aio.secure_channel(
+ target, composite_credentials, compression=compression, **kwargs
+ )
class FakeUnaryUnaryCall(_WrappedUnaryUnaryCall):
diff --git a/google/api_core/iam.py b/google/api_core/iam.py
index f1309360..4437c701 100644
--- a/google/api_core/iam.py
+++ b/google/api_core/iam.py
@@ -52,14 +52,10 @@
"""
import collections
+import collections.abc
import operator
import warnings
-try:
- from collections import abc as collections_abc
-except ImportError: # Python 2.7
- import collections as collections_abc
-
# Generic IAM roles
OWNER_ROLE = "roles/owner"
@@ -74,9 +70,6 @@
_ASSIGNMENT_DEPRECATED_MSG = """\
Assigning to '{}' is deprecated. Use the `policy.bindings` property to modify bindings instead."""
-_FACTORY_DEPRECATED_MSG = """\
-Factory method {0} is deprecated. Replace with '{0}'."""
-
_DICT_ACCESS_MSG = """\
Dict access is not supported on policies with version > 1 or with conditional bindings."""
@@ -87,7 +80,7 @@ class InvalidOperationException(Exception):
pass
-class Policy(collections_abc.MutableMapping):
+class Policy(collections.abc.MutableMapping):
"""IAM Policy
Args:
@@ -125,18 +118,25 @@ def __init__(self, etag=None, version=None):
def __iter__(self):
self.__check_version__()
- return (binding["role"] for binding in self._bindings)
+ # Exclude bindings with no members
+ return (binding["role"] for binding in self._bindings if binding["members"])
def __len__(self):
self.__check_version__()
- return len(self._bindings)
+ # Exclude bindings with no members
+ return len(list(self.__iter__()))
def __getitem__(self, key):
self.__check_version__()
for b in self._bindings:
if b["role"] == key:
return b["members"]
- return set()
+ # If the binding does not yet exist, create one
+ # NOTE: This will create bindings with no members
+ # which are ignored by __iter__ and __len__
+ new_binding = {"role": key, "members": set()}
+ self._bindings.append(new_binding)
+ return new_binding["members"]
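A minimal sketch of the new lazy-binding behavior (the member string is an example value):

from google.api_core.iam import Policy

policy = Policy()
members = policy["roles/viewer"]  # lazily creates an empty binding
assert len(policy) == 0           # empty bindings are hidden from len()/iter()
members.add("user:alice@example.com")
assert len(policy) == 1
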
def __setitem__(self, key, value):
self.__check_version__()
@@ -316,12 +316,7 @@ def user(email):
Returns:
str: A member string corresponding to the given user.
-
- DEPRECATED: set the role `user:{email}` in the binding instead.
"""
- warnings.warn(
- _FACTORY_DEPRECATED_MSG.format("user:{email}"), DeprecationWarning,
- )
return "user:%s" % (email,)
@staticmethod
@@ -334,12 +329,7 @@ def service_account(email):
Returns:
str: A member string corresponding to the given service account.
- DEPRECATED: set the role `serviceAccount:{email}` in the binding instead.
"""
- warnings.warn(
- _FACTORY_DEPRECATED_MSG.format("serviceAccount:{email}"),
- DeprecationWarning,
- )
return "serviceAccount:%s" % (email,)
@staticmethod
@@ -351,12 +341,7 @@ def group(email):
Returns:
str: A member string corresponding to the given group.
-
- DEPRECATED: set the role `group:{email}` in the binding instead.
"""
- warnings.warn(
- _FACTORY_DEPRECATED_MSG.format("group:{email}"), DeprecationWarning,
- )
return "group:%s" % (email,)
@staticmethod
@@ -368,12 +353,7 @@ def domain(domain):
Returns:
str: A member string corresponding to the given domain.
-
- DEPRECATED: set the role `domain:{email}` in the binding instead.
"""
- warnings.warn(
- _FACTORY_DEPRECATED_MSG.format("domain:{email}"), DeprecationWarning,
- )
return "domain:%s" % (domain,)
@staticmethod
@@ -382,12 +362,7 @@ def all_users():
Returns:
str: A member string representing all users.
-
- DEPRECATED: set the role `allUsers` in the binding instead.
"""
- warnings.warn(
- _FACTORY_DEPRECATED_MSG.format("allUsers"), DeprecationWarning,
- )
return "allUsers"
@staticmethod
@@ -396,12 +371,7 @@ def authenticated_users():
Returns:
str: A member string representing all authenticated users.
-
- DEPRECATED: set the role `allAuthenticatedUsers` in the binding instead.
"""
- warnings.warn(
- _FACTORY_DEPRECATED_MSG.format("allAuthenticatedUsers"), DeprecationWarning,
- )
return "allAuthenticatedUsers"
@classmethod
@@ -443,10 +413,7 @@ def to_api_repr(self):
for binding in self._bindings:
members = binding.get("members")
if members:
- new_binding = {
- "role": binding["role"],
- "members": sorted(members)
- }
+ new_binding = {"role": binding["role"], "members": sorted(members)}
condition = binding.get("condition")
if condition:
new_binding["condition"] = condition
diff --git a/google/api_core/operation.py b/google/api_core/operation.py
index e6407b8c..4b9c9a58 100644
--- a/google/api_core/operation.py
+++ b/google/api_core/operation.py
@@ -61,10 +61,13 @@ class Operation(polling.PollingFuture):
result.
metadata_type (func:`type`): The protobuf type for the operation's
metadata.
- retry (google.api_core.retry.Retry): The retry configuration used
- when polling. This can be used to control how often :meth:`done`
- is polled. Regardless of the retry's ``deadline``, it will be
- overridden by the ``timeout`` argument to :meth:`result`.
+ polling (google.api_core.retry.Retry): The configuration used for polling.
+ This parameter controls how often :meth:`done` is polled. If the
+ ``timeout`` argument is specified in the :meth:`result` method, it will
+ override the ``polling.timeout`` property.
+ retry (google.api_core.retry.Retry): DEPRECATED: use ``polling`` instead.
+ If specified, it will override the ``polling`` parameter to maintain
+ backward compatibility.
"""
def __init__(
@@ -74,9 +77,10 @@ def __init__(
cancel,
result_type,
metadata_type=None,
- retry=polling.DEFAULT_RETRY,
+ polling=polling.DEFAULT_POLLING,
+ **kwargs
):
- super(Operation, self).__init__(retry=retry)
+ super(Operation, self).__init__(polling=polling, **kwargs)
self._operation = operation
self._refresh = refresh
self._cancel = cancel
@@ -132,8 +136,9 @@ def _set_result_from_operation(self):
)
self.set_result(response)
elif self._operation.HasField("error"):
- exception = exceptions.GoogleAPICallError(
- self._operation.error.message,
+ exception = exceptions.from_grpc_status(
+ status_code=self._operation.error.code,
+ message=self._operation.error.message,
errors=(self._operation.error,),
response=self._operation,
)
@@ -145,7 +150,7 @@ def _set_result_from_operation(self):
)
self.set_exception(exception)
- def _refresh_and_update(self, retry=polling.DEFAULT_RETRY):
+ def _refresh_and_update(self, retry=None):
"""Refresh the operation and update the result if needed.
Args:
@@ -154,10 +159,10 @@ def _refresh_and_update(self, retry=polling.DEFAULT_RETRY):
# If the currently cached operation is done, no need to make another
# RPC as it will not change once done.
if not self._operation.done:
- self._operation = self._refresh(retry=retry)
+ self._operation = self._refresh(retry=retry) if retry else self._refresh()
self._set_result_from_operation()
- def done(self, retry=polling.DEFAULT_RETRY):
+ def done(self, retry=None):
"""Checks to see if the operation is complete.
Args:
@@ -191,7 +196,7 @@ def cancelled(self):
)
-def _refresh_http(api_request, operation_name):
+def _refresh_http(api_request, operation_name, retry=None):
"""Refresh an operation using a JSON/HTTP client.
Args:
@@ -199,11 +204,16 @@ def _refresh_http(api_request, operation_name):
should generally be
:meth:`google.cloud._http.Connection.api_request`.
operation_name (str): The name of the operation.
+ retry (google.api_core.retry.Retry): (Optional) retry policy
Returns:
google.longrunning.operations_pb2.Operation: The operation.
"""
path = "operations/{}".format(operation_name)
+
+ if retry is not None:
+ api_request = retry(api_request)
+
api_response = api_request(method="GET", path=path)
return json_format.ParseDict(api_response, operations_pb2.Operation())
@@ -248,19 +258,25 @@ def from_http_json(operation, api_request, result_type, **kwargs):
return Operation(operation_proto, refresh, cancel, result_type, **kwargs)
-def _refresh_grpc(operations_stub, operation_name):
+def _refresh_grpc(operations_stub, operation_name, retry=None):
"""Refresh an operation using a gRPC client.
Args:
operations_stub (google.longrunning.operations_pb2.OperationsStub):
The gRPC operations stub.
operation_name (str): The name of the operation.
+ retry (google.api_core.retry.Retry): (Optional) retry policy
Returns:
google.longrunning.operations_pb2.Operation: The operation.
"""
request_pb = operations_pb2.GetOperationRequest(name=operation_name)
- return operations_stub.GetOperation(request_pb)
+
+ rpc = operations_stub.GetOperation
+ if retry is not None:
+ rpc = retry(rpc)
+
+ return rpc(request_pb)
def _cancel_grpc(operations_stub, operation_name):
@@ -275,7 +291,7 @@ def _cancel_grpc(operations_stub, operation_name):
operations_stub.CancelOperation(request_pb)
-def from_grpc(operation, operations_stub, result_type, **kwargs):
+def from_grpc(operation, operations_stub, result_type, grpc_metadata=None, **kwargs):
"""Create an operation future using a gRPC client.
This interacts with the long-running operations `service`_ (specific
@@ -290,18 +306,30 @@ def from_grpc(operation, operations_stub, result_type, **kwargs):
operations_stub (google.longrunning.operations_pb2.OperationsStub):
The operations stub.
result_type (:func:`type`): The protobuf result type.
+ grpc_metadata (Optional[List[Tuple[str, str]]]): Additional metadata to pass
+ to the rpc.
kwargs: Keyword args passed into the :class:`Operation` constructor.
Returns:
~.api_core.operation.Operation: The operation future to track the given
operation.
"""
- refresh = functools.partial(_refresh_grpc, operations_stub, operation.name)
- cancel = functools.partial(_cancel_grpc, operations_stub, operation.name)
+ refresh = functools.partial(
+ _refresh_grpc,
+ operations_stub,
+ operation.name,
+ metadata=grpc_metadata,
+ )
+ cancel = functools.partial(
+ _cancel_grpc,
+ operations_stub,
+ operation.name,
+ metadata=grpc_metadata,
+ )
return Operation(operation, refresh, cancel, result_type, **kwargs)
-def from_gapic(operation, operations_client, result_type, **kwargs):
+def from_gapic(operation, operations_client, result_type, grpc_metadata=None, **kwargs):
"""Create an operation future from a gapic client.
This interacts with the long-running operations `service`_ (specific
@@ -316,12 +344,22 @@ def from_gapic(operation, operations_client, result_type, **kwargs):
operations_client (google.api_core.operations_v1.OperationsClient):
The operations client.
result_type (:func:`type`): The protobuf result type.
+ grpc_metadata (Optional[List[Tuple[str, str]]]): Additional metadata to pass
+ to the rpc.
kwargs: Keyword args passed into the :class:`Operation` constructor.
Returns:
~.api_core.operation.Operation: The operation future to track the given
operation.
"""
- refresh = functools.partial(operations_client.get_operation, operation.name)
- cancel = functools.partial(operations_client.cancel_operation, operation.name)
+ refresh = functools.partial(
+ operations_client.get_operation,
+ operation.name,
+ metadata=grpc_metadata,
+ )
+ cancel = functools.partial(
+ operations_client.cancel_operation,
+ operation.name,
+ metadata=grpc_metadata,
+ )
return Operation(operation, refresh, cancel, result_type, **kwargs)
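A hedged sketch of threading metadata through to the polling RPCs; the arguments and the header value are placeholders, not part of this patch:

from google.api_core import operation

def track(lro, operations_client, result_type):
    """lro: an operations_pb2.Operation; operations_client: an operations_v1.OperationsClient."""
    future = operation.from_gapic(
        lro,
        operations_client,
        result_type,
        grpc_metadata=[("x-goog-request-params", "name=projects/example")],
    )
    # refresh and cancel RPCs issued while polling now carry the metadata
    return future.result(timeout=300)
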
diff --git a/google/api_core/operation_async.py b/google/api_core/operation_async.py
index 89500af1..2fd341d9 100644
--- a/google/api_core/operation_async.py
+++ b/google/api_core/operation_async.py
@@ -189,7 +189,7 @@ async def cancelled(self):
)
-def from_gapic(operation, operations_client, result_type, **kwargs):
+def from_gapic(operation, operations_client, result_type, grpc_metadata=None, **kwargs):
"""Create an operation future from a gapic client.
This interacts with the long-running operations `service`_ (specific
@@ -204,12 +204,22 @@ def from_gapic(operation, operations_client, result_type, **kwargs):
operations_client (google.api_core.operations_v1.OperationsClient):
The operations client.
result_type (:func:`type`): The protobuf result type.
+ grpc_metadata (Optional[List[Tuple[str, str]]]): Additional metadata to pass
+ to the rpc.
kwargs: Keyword args passed into the :class:`Operation` constructor.
Returns:
~.api_core.operation.Operation: The operation future to track the given
operation.
"""
- refresh = functools.partial(operations_client.get_operation, operation.name)
- cancel = functools.partial(operations_client.cancel_operation, operation.name)
+ refresh = functools.partial(
+ operations_client.get_operation,
+ operation.name,
+ metadata=grpc_metadata,
+ )
+ cancel = functools.partial(
+ operations_client.cancel_operation,
+ operation.name,
+ metadata=grpc_metadata,
+ )
return AsyncOperation(operation, refresh, cancel, result_type, **kwargs)
diff --git a/google/api_core/operations_v1/__init__.py b/google/api_core/operations_v1/__init__.py
index bc9befcb..4db32a4c 100644
--- a/google/api_core/operations_v1/__init__.py
+++ b/google/api_core/operations_v1/__init__.py
@@ -14,11 +14,27 @@
"""Package for interacting with the google.longrunning.operations meta-API."""
-import sys
-
+from google.api_core.operations_v1.abstract_operations_client import AbstractOperationsClient
+from google.api_core.operations_v1.operations_async_client import OperationsAsyncClient
from google.api_core.operations_v1.operations_client import OperationsClient
+from google.api_core.operations_v1.transports.rest import OperationsRestTransport
+
+__all__ = [
+ "AbstractOperationsClient",
+ "OperationsAsyncClient",
+ "OperationsClient",
+ "OperationsRestTransport"
+]
+
+try:
+ from google.api_core.operations_v1.transports.rest_asyncio import (
+ AsyncOperationsRestTransport,
+ )
+ from google.api_core.operations_v1.operations_rest_client_async import AsyncOperationsRestClient
-__all__ = ["OperationsClient"]
-if sys.version_info >= (3, 6, 0):
- from google.api_core.operations_v1.operations_async_client import OperationsAsyncClient # noqa: F401
- __all__.append("OperationsAsyncClient")
+ __all__ += ["AsyncOperationsRestClient", "AsyncOperationsRestTransport"]
+except ImportError:
+ # This import requires the `async_rest` extra.
+ # Don't raise an exception if `AsyncOperationsRestTransport` cannot be imported
+ # as other transports are still available.
+ pass
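A sketch of guarding for the optional async REST surface from calling code:

try:
    from google.api_core.operations_v1 import AsyncOperationsRestClient
except ImportError:
    # the `async_rest` extra is not installed; fall back to the sync client
    AsyncOperationsRestClient = None
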
diff --git a/google/api_core/operations_v1/abstract_operations_base_client.py b/google/api_core/operations_v1/abstract_operations_base_client.py
new file mode 100644
index 00000000..160c2a88
--- /dev/null
+++ b/google/api_core/operations_v1/abstract_operations_base_client.py
@@ -0,0 +1,370 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from collections import OrderedDict
+import os
+import re
+from typing import Dict, Optional, Type, Union
+
+from google.api_core import client_options as client_options_lib # type: ignore
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core.operations_v1.transports.base import (
+ DEFAULT_CLIENT_INFO,
+ OperationsTransport,
+)
+from google.api_core.operations_v1.transports.rest import OperationsRestTransport
+
+try:
+ from google.api_core.operations_v1.transports.rest_asyncio import (
+ AsyncOperationsRestTransport,
+ )
+
+ HAS_ASYNC_REST_DEPENDENCIES = True
+except ImportError as e:
+ HAS_ASYNC_REST_DEPENDENCIES = False
+ ASYNC_REST_EXCEPTION = e
+
+from google.auth import credentials as ga_credentials # type: ignore
+from google.auth.exceptions import MutualTLSChannelError # type: ignore
+from google.auth.transport import mtls # type: ignore
+
+
+class AbstractOperationsBaseClientMeta(type):
+ """Metaclass for the Operations Base client.
+
+ This provides base class-level methods for building and retrieving
+ support objects (e.g. transport) without polluting the client instance
+ objects.
+ """
+
+ _transport_registry = OrderedDict() # type: Dict[str, Type[OperationsTransport]]
+ _transport_registry["rest"] = OperationsRestTransport
+ if HAS_ASYNC_REST_DEPENDENCIES:
+ _transport_registry["rest_asyncio"] = AsyncOperationsRestTransport
+
+ def get_transport_class(
+ cls,
+ label: Optional[str] = None,
+ ) -> Type[OperationsTransport]:
+ """Returns an appropriate transport class.
+
+ Args:
+ label: The name of the desired transport. If none is
+ provided, then the first transport in the registry is used.
+
+ Returns:
+ The transport class to use.
+ """
+ # If a specific transport is requested, return that one.
+ if (
+ label == "rest_asyncio" and not HAS_ASYNC_REST_DEPENDENCIES
+ ): # pragma: NO COVER
+ raise ASYNC_REST_EXCEPTION
+
+ if label:
+ return cls._transport_registry[label]
+
+ # No transport is requested; return the default (that is, the first one
+ # in the dictionary).
+ return next(iter(cls._transport_registry.values()))
+
+
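A sketch of the registry fallback, using the concrete base client defined just below:

from google.api_core.operations_v1 import abstract_operations_base_client as base

# with no label, the first registered transport ("rest") wins
transport_cls = base.AbstractOperationsBaseClient.get_transport_class()
assert transport_cls is base.OperationsRestTransport
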
+class AbstractOperationsBaseClient(metaclass=AbstractOperationsBaseClientMeta):
+ """Manages long-running operations with an API service.
+
+ When an API method normally takes a long time to complete, it can be
+ designed to return [Operation][google.api_core.operations_v1.Operation] to the
+ client, and the client can use this interface to receive the real
+ response asynchronously by polling the operation resource, or pass
+ the operation resource to another API (such as Google Cloud Pub/Sub
+ API) to receive the response. Any API service that returns
+ long-running operations should implement the ``Operations``
+ interface so developers can have a consistent client experience.
+ """
+
+ @staticmethod
+ def _get_default_mtls_endpoint(api_endpoint):
+ """Converts api endpoint to mTLS endpoint.
+
+ Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+ "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+ Args:
+ api_endpoint (Optional[str]): The API endpoint to convert.
+ Returns:
+ str: The converted mTLS API endpoint.
+ """
+ if not api_endpoint:
+ return api_endpoint
+
+ mtls_endpoint_re = re.compile(
+ r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ )
+
+ m = mtls_endpoint_re.match(api_endpoint)
+ name, mtls, sandbox, googledomain = m.groups()
+ if mtls or not googledomain:
+ return api_endpoint
+
+ if sandbox:
+ return api_endpoint.replace(
+ "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+ )
+
+ return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+ DEFAULT_ENDPOINT = "longrunning.googleapis.com"
+ DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore
+ DEFAULT_ENDPOINT
+ )
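Expected conversions for the endpoint helper above, once the class is defined (illustrative inputs):

f = AbstractOperationsBaseClient._get_default_mtls_endpoint
assert f("longrunning.googleapis.com") == "longrunning.mtls.googleapis.com"
assert f("longrunning.sandbox.googleapis.com") == "longrunning.mtls.sandbox.googleapis.com"
assert f("longrunning.mtls.googleapis.com") == "longrunning.mtls.googleapis.com"
assert f("example.com") == "example.com"  # non-Google domains pass through
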
+
+ @classmethod
+ def from_service_account_info(cls, info: dict, *args, **kwargs):
+ """
+ This class method should be overridden by the subclasses.
+
+ Args:
+ info (dict): The service account private key info.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Raises:
+ NotImplementedError: If the method is called on the base class.
+ """
+ raise NotImplementedError("`from_service_account_info` is not implemented.")
+
+ @classmethod
+ def from_service_account_file(cls, filename: str, *args, **kwargs):
+ """
+ This class method should be overridden by the subclasses.
+
+ Args:
+ filename (str): The path to the service account private key json
+ file.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Raises:
+ NotImplementedError: If the method is called on the base class.
+ """
+ raise NotImplementedError("`from_service_account_file` is not implemented.")
+
+ from_service_account_json = from_service_account_file
+
+ @property
+ def transport(self) -> OperationsTransport:
+ """Returns the transport used by the client instance.
+
+ Returns:
+ OperationsTransport: The transport used by the client
+ instance.
+ """
+ return self._transport
+
+ @staticmethod
+ def common_billing_account_path(
+ billing_account: str,
+ ) -> str:
+ """Returns a fully-qualified billing_account string."""
+ return "billingAccounts/{billing_account}".format(
+ billing_account=billing_account,
+ )
+
+ @staticmethod
+ def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+ """Parse a billing_account path into its component segments."""
+ m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_folder_path(
+ folder: str,
+ ) -> str:
+ """Returns a fully-qualified folder string."""
+ return "folders/{folder}".format(
+ folder=folder,
+ )
+
+ @staticmethod
+ def parse_common_folder_path(path: str) -> Dict[str, str]:
+ """Parse a folder path into its component segments."""
+ m = re.match(r"^folders/(?P<folder>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_organization_path(
+ organization: str,
+ ) -> str:
+ """Returns a fully-qualified organization string."""
+ return "organizations/{organization}".format(
+ organization=organization,
+ )
+
+ @staticmethod
+ def parse_common_organization_path(path: str) -> Dict[str, str]:
+ """Parse an organization path into its component segments."""
+ m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_project_path(
+ project: str,
+ ) -> str:
+ """Returns a fully-qualified project string."""
+ return "projects/{project}".format(
+ project=project,
+ )
+
+ @staticmethod
+ def parse_common_project_path(path: str) -> Dict[str, str]:
+ """Parse a project path into its component segments."""
+ m = re.match(r"^projects/(?P<project>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_location_path(
+ project: str,
+ location: str,
+ ) -> str:
+ """Returns a fully-qualified location string."""
+ return "projects/{project}/locations/{location}".format(
+ project=project,
+ location=location,
+ )
+
+ @staticmethod
+ def parse_common_location_path(path: str) -> Dict[str, str]:
+ """Parse a location path into its component segments."""
+ m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+ return m.groupdict() if m else {}
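A round-trip sketch for the path helpers above (values are examples):

path = AbstractOperationsBaseClient.common_location_path("my-proj", "us-central1")
assert path == "projects/my-proj/locations/us-central1"
assert AbstractOperationsBaseClient.parse_common_location_path(path) == {
    "project": "my-proj",
    "location": "us-central1",
}
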
+
+ def __init__(
+ self,
+ *,
+ credentials: Optional[ga_credentials.Credentials] = None,
+ transport: Union[str, OperationsTransport, None] = None,
+ client_options: Optional[client_options_lib.ClientOptions] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ ) -> None:
+ """Instantiates the operations client.
+
+ Args:
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ transport (Union[str, OperationsTransport]): The
+ transport to use. If set to None, a transport is chosen
+ automatically.
+ client_options (google.api_core.client_options.ClientOptions): Custom options for the
+ client. It won't take effect if a ``transport`` instance is provided.
+ (1) The ``api_endpoint`` property can be used to override the
+ default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+ environment variable can also be used to override the endpoint:
+ "always" (always use the default mTLS endpoint), "never" (always
+ use the default regular endpoint) and "auto" (auto switch to the
+ default mTLS endpoint if client certificate is present, this is
+ the default value). However, the ``api_endpoint`` property takes
+ precedence if provided.
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+ is "true", then the ``client_cert_source`` property can be used
+ to provide client certificate for mutual TLS transport. If
+ not provided, the default SSL client certificate will be used if
+ present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+ set, no client certificate will be used.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+ creation failed for any reason.
+ """
+ if isinstance(client_options, dict):
+ client_options = client_options_lib.from_dict(client_options)
+ if client_options is None:
+ client_options = client_options_lib.ClientOptions()
+
+ # Create SSL credentials for mutual TLS if needed.
+ use_client_cert = os.getenv(
+ "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"
+ ).lower()
+ if use_client_cert not in ("true", "false"):
+ raise ValueError(
+ "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+ )
+ client_cert_source_func = None
+ is_mtls = False
+ if use_client_cert == "true":
+ if client_options.client_cert_source:
+ is_mtls = True
+ client_cert_source_func = client_options.client_cert_source
+ else:
+ is_mtls = mtls.has_default_client_cert_source()
+ if is_mtls:
+ client_cert_source_func = mtls.default_client_cert_source()
+ else:
+ client_cert_source_func = None
+
+ # Figure out which api endpoint to use.
+ if client_options.api_endpoint is not None:
+ api_endpoint = client_options.api_endpoint
+ else:
+ use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+ if use_mtls_env == "never":
+ api_endpoint = self.DEFAULT_ENDPOINT
+ elif use_mtls_env == "always":
+ api_endpoint = self.DEFAULT_MTLS_ENDPOINT
+ elif use_mtls_env == "auto":
+ if is_mtls:
+ api_endpoint = self.DEFAULT_MTLS_ENDPOINT
+ else:
+ api_endpoint = self.DEFAULT_ENDPOINT
+ else:
+ raise MutualTLSChannelError(
+ "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted "
+ "values: never, auto, always"
+ )
+
+ # Save or instantiate the transport.
+ # Ordinarily, we provide the transport, but allowing a custom transport
+ # instance provides an extensibility point for unusual situations.
+ if isinstance(transport, OperationsTransport):
+ # transport is an OperationsTransport instance.
+ if credentials or client_options.credentials_file:
+ raise ValueError(
+ "When providing a transport instance, "
+ "provide its credentials directly."
+ )
+ if client_options.scopes:
+ raise ValueError(
+ "When providing a transport instance, provide its scopes "
+ "directly."
+ )
+ self._transport = transport
+ else:
+ Transport = type(self).get_transport_class(transport)
+ self._transport = Transport(
+ credentials=credentials,
+ credentials_file=client_options.credentials_file,
+ host=api_endpoint,
+ scopes=client_options.scopes,
+ client_cert_source_for_mtls=client_cert_source_func,
+ quota_project_id=client_options.quota_project_id,
+ client_info=client_info,
+ always_use_jwt_access=True,
+ )
diff --git a/google/api_core/operations_v1/abstract_operations_client.py b/google/api_core/operations_v1/abstract_operations_client.py
new file mode 100644
index 00000000..fc445362
--- /dev/null
+++ b/google/api_core/operations_v1/abstract_operations_client.py
@@ -0,0 +1,387 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from typing import Optional, Sequence, Tuple, Union
+
+from google.api_core import client_options as client_options_lib # type: ignore
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import retry as retries # type: ignore
+from google.api_core.operations_v1 import pagers
+from google.api_core.operations_v1.transports.base import (
+ DEFAULT_CLIENT_INFO,
+ OperationsTransport,
+)
+from google.api_core.operations_v1.abstract_operations_base_client import (
+ AbstractOperationsBaseClient,
+)
+from google.auth import credentials as ga_credentials # type: ignore
+from google.longrunning import operations_pb2
+from google.oauth2 import service_account # type: ignore
+import grpc
+
+OptionalRetry = Union[retries.Retry, object]
+
+
+class AbstractOperationsClient(AbstractOperationsBaseClient):
+ """Manages long-running operations with an API service.
+
+ When an API method normally takes a long time to complete, it can be
+ designed to return [Operation][google.api_core.operations_v1.Operation] to the
+ client, and the client can use this interface to receive the real
+ response asynchronously by polling the operation resource, or pass
+ the operation resource to another API (such as Google Cloud Pub/Sub
+ API) to receive the response. Any API service that returns
+ long-running operations should implement the ``Operations``
+ interface so developers can have a consistent client experience.
+ """
+
+ def __init__(
+ self,
+ *,
+ credentials: Optional[ga_credentials.Credentials] = None,
+ transport: Union[str, OperationsTransport, None] = None,
+ client_options: Optional[client_options_lib.ClientOptions] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ ) -> None:
+ """Instantiates the operations client.
+
+ Args:
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ transport (Union[str, OperationsTransport]): The
+ transport to use. If set to None, a transport is chosen
+ automatically.
+ client_options (google.api_core.client_options.ClientOptions): Custom options for the
+ client. It won't take effect if a ``transport`` instance is provided.
+ (1) The ``api_endpoint`` property can be used to override the
+ default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+ environment variable can also be used to override the endpoint:
+ "always" (always use the default mTLS endpoint), "never" (always
+ use the default regular endpoint) and "auto" (auto switch to the
+ default mTLS endpoint if client certificate is present, this is
+ the default value). However, the ``api_endpoint`` property takes
+ precedence if provided.
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+ is "true", then the ``client_cert_source`` property can be used
+ to provide client certificate for mutual TLS transport. If
+ not provided, the default SSL client certificate will be used if
+ present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+ set, no client certificate will be used.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+ creation failed for any reason.
+ """
+ super().__init__(
+ credentials=credentials,
+ transport=transport,
+ client_options=client_options,
+ client_info=client_info,
+ )
+
+ @classmethod
+ def from_service_account_info(cls, info: dict, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ info.
+
+ Args:
+ info (dict): The service account private key info.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ AbstractOperationsClient: The constructed client.
+ """
+ credentials = service_account.Credentials.from_service_account_info(info)
+ kwargs["credentials"] = credentials
+ return cls(*args, **kwargs)
+
+ @classmethod
+ def from_service_account_file(cls, filename: str, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ file.
+
+ Args:
+ filename (str): The path to the service account private key json
+ file.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ AbstractOperationsClient: The constructed client.
+ """
+ credentials = service_account.Credentials.from_service_account_file(filename)
+ kwargs["credentials"] = credentials
+ return cls(*args, **kwargs)
+
+ from_service_account_json = from_service_account_file
+
+ def list_operations(
+ self,
+ name: str,
+ filter_: Optional[str] = None,
+ *,
+ page_size: Optional[int] = None,
+ page_token: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Optional[float] = None,
+ compression: Optional[grpc.Compression] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> pagers.ListOperationsPager:
+ r"""Lists operations that match the specified filter in the request.
+ If the server doesn't support this method, it returns
+ ``UNIMPLEMENTED``.
+
+ NOTE: the ``name`` binding allows API services to override the
+ binding to use different resource name schemes, such as
+ ``users/*/operations``. To override the binding, API services
+ can add a binding such as ``"/v1/{name=users/*}/operations"`` to
+ their service configuration. For backwards compatibility, the
+        default name includes the operations collection id; however,
+ overriding users must ensure the name binding is the parent
+ resource, without the operations collection id.
+
+ Args:
+ name (str):
+ The name of the operation's parent
+ resource.
+ filter_ (str):
+ The standard list filter.
+ This corresponds to the ``filter`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.api_core.operations_v1.pagers.ListOperationsPager:
+ The response message for
+ [Operations.ListOperations][google.api_core.operations_v1.Operations.ListOperations].
+
+ Iterating over this object will yield results and
+ resolve additional pages automatically.
+
+ """
+ # Create a protobuf request object.
+ request = operations_pb2.ListOperationsRequest(name=name, filter=filter_)
+ if page_size is not None:
+ request.page_size = page_size
+ if page_token is not None:
+ request.page_token = page_token
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.list_operations]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata or ()) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ compression=compression,
+ metadata=metadata,
+ )
+
+ # This method is paged; wrap the response in a pager, which provides
+ # an `__iter__` convenience method.
+ response = pagers.ListOperationsPager(
+ method=rpc,
+ request=request,
+ response=response,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def get_operation(
+ self,
+ name: str,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Optional[float] = None,
+ compression: Optional[grpc.Compression] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operations_pb2.Operation:
+ r"""Gets the latest state of a long-running operation.
+ Clients can use this method to poll the operation result
+ at intervals as recommended by the API service.
+
+ Args:
+ name (str):
+ The name of the operation resource.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.longrunning.operations_pb2.Operation:
+                This resource represents a long-running
+                operation that is the result of a
+ network API call.
+
+ """
+
+ request = operations_pb2.GetOperationRequest(name=name)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.get_operation]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata or ()) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ compression=compression,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def delete_operation(
+ self,
+ name: str,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Optional[float] = None,
+ compression: Optional[grpc.Compression] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> None:
+ r"""Deletes a long-running operation. This method indicates that the
+ client is no longer interested in the operation result. It does
+ not cancel the operation. If the server doesn't support this
+ method, it returns ``google.rpc.Code.UNIMPLEMENTED``.
+
+ Args:
+ name (str):
+ The name of the operation resource to
+ be deleted.
+
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ """
+ # Create the request object.
+ request = operations_pb2.DeleteOperationRequest(name=name)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.delete_operation]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata or ()) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ compression=compression,
+ metadata=metadata,
+ )
+
+ def cancel_operation(
+ self,
+ name: Optional[str] = None,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Optional[float] = None,
+ compression: Optional[grpc.Compression] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> None:
+ r"""Starts asynchronous cancellation on a long-running operation.
+ The server makes a best effort to cancel the operation, but
+ success is not guaranteed. If the server doesn't support this
+ method, it returns ``google.rpc.Code.UNIMPLEMENTED``. Clients
+ can use
+ [Operations.GetOperation][google.api_core.operations_v1.Operations.GetOperation]
+ or other methods to check whether the cancellation succeeded or
+ whether the operation completed despite cancellation. On
+ successful cancellation, the operation is not deleted; instead,
+ it becomes an operation with an
+ [Operation.error][google.api_core.operations_v1.Operation.error] value with
+ a [google.rpc.Status.code][google.rpc.Status.code] of 1,
+ corresponding to ``Code.CANCELLED``.
+
+ Args:
+ name (str):
+ The name of the operation resource to
+ be cancelled.
+
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ """
+ # Create the request object.
+ request = operations_pb2.CancelOperationRequest(name=name)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.cancel_operation]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata or ()) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ compression=compression,
+ metadata=metadata,
+ )
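
# A minimal usage sketch for the REST-backed client above, assuming it is the
# `AbstractOperationsClient` the docstrings refer to and that it is re-exported
# from `google.api_core.operations_v1`; the file path and operation name are
# hypothetical.
from google.api_core.operations_v1 import AbstractOperationsClient

client = AbstractOperationsClient.from_service_account_file("service-account.json")

# Poll a long-running operation by resource name; the routing header is
# derived from `name` and attached automatically.
operation = client.get_operation("projects/p/operations/sample-lro")
if operation.done:
    print("finished:", operation.name)
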
diff --git a/google/api_core/operations_v1/operations_async_client.py b/google/api_core/operations_v1/operations_async_client.py
index 039bec1b..a60c7177 100644
--- a/google/api_core/operations_v1/operations_async_client.py
+++ b/google/api_core/operations_v1/operations_async_client.py
@@ -24,9 +24,12 @@
import functools
+from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1, page_iterator_async
-from google.api_core.operations_v1 import operations_client_config
+from google.api_core import retry_async as retries
+from google.api_core import timeout as timeouts
from google.longrunning import operations_pb2
+from grpc import Compression
class OperationsAsyncClient:
@@ -41,43 +44,59 @@ class OperationsAsyncClient:
the default configuration is used.
"""
- def __init__(self, channel, client_config=operations_client_config.config):
+ def __init__(self, channel, client_config=None):
# Create the gRPC client stub with gRPC AsyncIO channel.
self.operations_stub = operations_pb2.OperationsStub(channel)
- # Create all wrapped methods using the interface configuration.
- # The interface config contains all of the default settings for retry
- # and timeout for each RPC method.
- interfaces = client_config["interfaces"]
- interface_config = interfaces["google.longrunning.Operations"]
- method_configs = gapic_v1.config_async.parse_method_configs(interface_config)
+ default_retry = retries.AsyncRetry(
+ initial=0.1, # seconds
+ maximum=60.0, # seconds
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ core_exceptions.DeadlineExceeded,
+ core_exceptions.ServiceUnavailable,
+ ),
+ timeout=600.0, # seconds
+ )
+ default_timeout = timeouts.TimeToDeadlineTimeout(timeout=600.0)
+
+ default_compression = Compression.NoCompression
self._get_operation = gapic_v1.method_async.wrap_method(
self.operations_stub.GetOperation,
- default_retry=method_configs["GetOperation"].retry,
- default_timeout=method_configs["GetOperation"].timeout,
+ default_retry=default_retry,
+ default_timeout=default_timeout,
+ default_compression=default_compression,
)
self._list_operations = gapic_v1.method_async.wrap_method(
self.operations_stub.ListOperations,
- default_retry=method_configs["ListOperations"].retry,
- default_timeout=method_configs["ListOperations"].timeout,
+ default_retry=default_retry,
+ default_timeout=default_timeout,
+ default_compression=default_compression,
)
self._cancel_operation = gapic_v1.method_async.wrap_method(
self.operations_stub.CancelOperation,
- default_retry=method_configs["CancelOperation"].retry,
- default_timeout=method_configs["CancelOperation"].timeout,
+ default_retry=default_retry,
+ default_timeout=default_timeout,
+ default_compression=default_compression,
)
self._delete_operation = gapic_v1.method_async.wrap_method(
self.operations_stub.DeleteOperation,
- default_retry=method_configs["DeleteOperation"].retry,
- default_timeout=method_configs["DeleteOperation"].timeout,
+ default_retry=default_retry,
+ default_timeout=default_timeout,
+ default_compression=default_compression,
)
async def get_operation(
- self, name, retry=gapic_v1.method_async.DEFAULT, timeout=gapic_v1.method_async.DEFAULT
+ self,
+ name,
+ retry=gapic_v1.method_async.DEFAULT,
+ timeout=gapic_v1.method_async.DEFAULT,
+ compression=gapic_v1.method_async.DEFAULT,
+ metadata=None,
):
"""Gets the latest state of a long-running operation.
@@ -103,6 +122,10 @@ async def get_operation(
                unspecified, the default timeout in the client
configuration is used. If ``None``, then the RPC method will
not time out.
+            compression (grpc.Compression): An element of grpc.Compression,
+                e.g. grpc.Compression.Gzip.
+ metadata (Optional[List[Tuple[str, str]]]):
+ Additional gRPC metadata.
Returns:
google.longrunning.operations_pb2.Operation: The state of the
@@ -114,7 +137,18 @@ async def get_operation(
subclass will be raised.
"""
request = operations_pb2.GetOperationRequest(name=name)
- return await self._get_operation(request, retry=retry, timeout=timeout)
+
+ # Add routing header
+ metadata = metadata or []
+ metadata.append(gapic_v1.routing_header.to_grpc_metadata({"name": name}))
+
+ return await self._get_operation(
+ request,
+ retry=retry,
+ timeout=timeout,
+ compression=compression,
+ metadata=metadata,
+ )
async def list_operations(
self,
@@ -122,6 +156,8 @@ async def list_operations(
filter_,
retry=gapic_v1.method_async.DEFAULT,
timeout=gapic_v1.method_async.DEFAULT,
+ compression=gapic_v1.method_async.DEFAULT,
+ metadata=None,
):
"""
Lists operations that match the specified filter in the request.
@@ -157,6 +193,10 @@ async def list_operations(
                unspecified, the default timeout in the client
configuration is used. If ``None``, then the RPC method will
not time out.
+            compression (grpc.Compression): An element of grpc.Compression,
+                e.g. grpc.Compression.Gzip.
+ metadata (Optional[List[Tuple[str, str]]]): Additional gRPC
+ metadata.
Returns:
google.api_core.page_iterator.Iterator: An iterator that yields
@@ -173,8 +213,18 @@ async def list_operations(
# Create the request object.
request = operations_pb2.ListOperationsRequest(name=name, filter=filter_)
+ # Add routing header
+ metadata = metadata or []
+ metadata.append(gapic_v1.routing_header.to_grpc_metadata({"name": name}))
+
# Create the method used to fetch pages
- method = functools.partial(self._list_operations, retry=retry, timeout=timeout)
+ method = functools.partial(
+ self._list_operations,
+ retry=retry,
+ timeout=timeout,
+ compression=compression,
+ metadata=metadata,
+ )
iterator = page_iterator_async.AsyncGRPCIterator(
client=None,
@@ -188,7 +238,12 @@ async def list_operations(
return iterator
async def cancel_operation(
- self, name, retry=gapic_v1.method_async.DEFAULT, timeout=gapic_v1.method_async.DEFAULT
+ self,
+ name,
+ retry=gapic_v1.method_async.DEFAULT,
+ timeout=gapic_v1.method_async.DEFAULT,
+ compression=gapic_v1.method_async.DEFAULT,
+ metadata=None,
):
"""Starts asynchronous cancellation on a long-running operation.
@@ -228,13 +283,33 @@ async def cancel_operation(
google.api_core.exceptions.GoogleAPICallError: If an error occurred
while invoking the RPC, the appropriate ``GoogleAPICallError``
subclass will be raised.
+            compression (grpc.Compression): An element of grpc.Compression,
+                e.g. grpc.Compression.Gzip.
+ metadata (Optional[List[Tuple[str, str]]]): Additional gRPC
+ metadata.
"""
# Create the request object.
request = operations_pb2.CancelOperationRequest(name=name)
- await self._cancel_operation(request, retry=retry, timeout=timeout)
+
+ # Add routing header
+ metadata = metadata or []
+ metadata.append(gapic_v1.routing_header.to_grpc_metadata({"name": name}))
+
+ await self._cancel_operation(
+ request,
+ retry=retry,
+ timeout=timeout,
+ compression=compression,
+ metadata=metadata,
+ )
async def delete_operation(
- self, name, retry=gapic_v1.method_async.DEFAULT, timeout=gapic_v1.method_async.DEFAULT
+ self,
+ name,
+ retry=gapic_v1.method_async.DEFAULT,
+ timeout=gapic_v1.method_async.DEFAULT,
+ compression=gapic_v1.method_async.DEFAULT,
+ metadata=None,
):
"""Deletes a long-running operation.
@@ -260,6 +335,10 @@ async def delete_operation(
                unspecified, the default timeout in the client
configuration is used. If ``None``, then the RPC method will
not time out.
+            compression (grpc.Compression): An element of grpc.Compression,
+                e.g. grpc.Compression.Gzip.
+ metadata (Optional[List[Tuple[str, str]]]): Additional gRPC
+ metadata.
Raises:
google.api_core.exceptions.MethodNotImplemented: If the server
@@ -271,4 +350,15 @@ async def delete_operation(
"""
# Create the request object.
request = operations_pb2.DeleteOperationRequest(name=name)
- await self._delete_operation(request, retry=retry, timeout=timeout)
+
+ # Add routing header
+ metadata = metadata or []
+ metadata.append(gapic_v1.routing_header.to_grpc_metadata({"name": name}))
+
+ await self._delete_operation(
+ request,
+ retry=retry,
+ timeout=timeout,
+ compression=compression,
+ metadata=metadata,
+ )
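
# A usage sketch for the gRPC AsyncIO client above, exercising the new
# `compression` and `metadata` parameters; the channel target and header are
# hypothetical.
import grpc

from google.api_core.operations_v1.operations_async_client import (
    OperationsAsyncClient,
)

async def poll(name):
    async with grpc.aio.insecure_channel("localhost:50051") as channel:
        client = OperationsAsyncClient(channel)
        # The client appends the routing header to whatever metadata is passed.
        return await client.get_operation(
            name,
            compression=grpc.Compression.Gzip,
            metadata=[("x-example-header", "1")],
        )
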
diff --git a/google/api_core/operations_v1/operations_client.py b/google/api_core/operations_v1/operations_client.py
index cd2923bb..d1d3fd55 100644
--- a/google/api_core/operations_v1/operations_client.py
+++ b/google/api_core/operations_v1/operations_client.py
@@ -37,10 +37,13 @@
import functools
+from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import page_iterator
-from google.api_core.operations_v1 import operations_client_config
+from google.api_core import retry as retries
+from google.api_core import timeout as timeouts
from google.longrunning import operations_pb2
+from grpc import Compression
class OperationsClient(object):
@@ -54,44 +57,60 @@ class OperationsClient(object):
the default configuration is used.
"""
- def __init__(self, channel, client_config=operations_client_config.config):
+ def __init__(self, channel, client_config=None):
# Create the gRPC client stub.
self.operations_stub = operations_pb2.OperationsStub(channel)
- # Create all wrapped methods using the interface configuration.
- # The interface config contains all of the default settings for retry
- # and timeout for each RPC method.
- interfaces = client_config["interfaces"]
- interface_config = interfaces["google.longrunning.Operations"]
- method_configs = gapic_v1.config.parse_method_configs(interface_config)
+ default_retry = retries.Retry(
+ initial=0.1, # seconds
+ maximum=60.0, # seconds
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ core_exceptions.DeadlineExceeded,
+ core_exceptions.ServiceUnavailable,
+ ),
+ timeout=600.0, # seconds
+ )
+ default_timeout = timeouts.TimeToDeadlineTimeout(timeout=600.0)
+
+ default_compression = Compression.NoCompression
self._get_operation = gapic_v1.method.wrap_method(
self.operations_stub.GetOperation,
- default_retry=method_configs["GetOperation"].retry,
- default_timeout=method_configs["GetOperation"].timeout,
+ default_retry=default_retry,
+ default_timeout=default_timeout,
+ default_compression=default_compression,
)
self._list_operations = gapic_v1.method.wrap_method(
self.operations_stub.ListOperations,
- default_retry=method_configs["ListOperations"].retry,
- default_timeout=method_configs["ListOperations"].timeout,
+ default_retry=default_retry,
+ default_timeout=default_timeout,
+ default_compression=default_compression,
)
self._cancel_operation = gapic_v1.method.wrap_method(
self.operations_stub.CancelOperation,
- default_retry=method_configs["CancelOperation"].retry,
- default_timeout=method_configs["CancelOperation"].timeout,
+ default_retry=default_retry,
+ default_timeout=default_timeout,
+ default_compression=default_compression,
)
self._delete_operation = gapic_v1.method.wrap_method(
self.operations_stub.DeleteOperation,
- default_retry=method_configs["DeleteOperation"].retry,
- default_timeout=method_configs["DeleteOperation"].timeout,
+ default_retry=default_retry,
+ default_timeout=default_timeout,
+ default_compression=default_compression,
)
# Service calls
def get_operation(
- self, name, retry=gapic_v1.method.DEFAULT, timeout=gapic_v1.method.DEFAULT
+ self,
+ name,
+ retry=gapic_v1.method.DEFAULT,
+ timeout=gapic_v1.method.DEFAULT,
+ compression=gapic_v1.method.DEFAULT,
+ metadata=None,
):
"""Gets the latest state of a long-running operation.
@@ -117,6 +136,10 @@ def get_operation(
                unspecified, the default timeout in the client
configuration is used. If ``None``, then the RPC method will
not time out.
+            compression (grpc.Compression): An element of grpc.Compression,
+                e.g. grpc.Compression.Gzip.
+ metadata (Optional[List[Tuple[str, str]]]):
+ Additional gRPC metadata.
Returns:
google.longrunning.operations_pb2.Operation: The state of the
@@ -128,7 +151,18 @@ def get_operation(
subclass will be raised.
"""
request = operations_pb2.GetOperationRequest(name=name)
- return self._get_operation(request, retry=retry, timeout=timeout)
+
+ # Add routing header
+ metadata = metadata or []
+ metadata.append(gapic_v1.routing_header.to_grpc_metadata({"name": name}))
+
+ return self._get_operation(
+ request,
+ retry=retry,
+ timeout=timeout,
+ compression=compression,
+ metadata=metadata,
+ )
def list_operations(
self,
@@ -136,6 +170,8 @@ def list_operations(
filter_,
retry=gapic_v1.method.DEFAULT,
timeout=gapic_v1.method.DEFAULT,
+ compression=gapic_v1.method.DEFAULT,
+ metadata=None,
):
"""
Lists operations that match the specified filter in the request.
@@ -171,6 +207,10 @@ def list_operations(
                unspecified, the default timeout in the client
configuration is used. If ``None``, then the RPC method will
not time out.
+            compression (grpc.Compression): An element of grpc.Compression,
+                e.g. grpc.Compression.Gzip.
+ metadata (Optional[List[Tuple[str, str]]]): Additional gRPC
+ metadata.
Returns:
google.api_core.page_iterator.Iterator: An iterator that yields
@@ -187,8 +227,18 @@ def list_operations(
# Create the request object.
request = operations_pb2.ListOperationsRequest(name=name, filter=filter_)
+ # Add routing header
+ metadata = metadata or []
+ metadata.append(gapic_v1.routing_header.to_grpc_metadata({"name": name}))
+
# Create the method used to fetch pages
- method = functools.partial(self._list_operations, retry=retry, timeout=timeout)
+ method = functools.partial(
+ self._list_operations,
+ retry=retry,
+ timeout=timeout,
+ compression=compression,
+ metadata=metadata,
+ )
iterator = page_iterator.GRPCIterator(
client=None,
@@ -202,7 +252,12 @@ def list_operations(
return iterator
def cancel_operation(
- self, name, retry=gapic_v1.method.DEFAULT, timeout=gapic_v1.method.DEFAULT
+ self,
+ name,
+ retry=gapic_v1.method.DEFAULT,
+ timeout=gapic_v1.method.DEFAULT,
+ compression=gapic_v1.method.DEFAULT,
+ metadata=None,
):
"""Starts asynchronous cancellation on a long-running operation.
@@ -234,6 +289,10 @@ def cancel_operation(
                unspecified, the default timeout in the client
configuration is used. If ``None``, then the RPC method will
not time out.
+            compression (grpc.Compression): An element of grpc.Compression,
+                e.g. grpc.Compression.Gzip.
+ metadata (Optional[List[Tuple[str, str]]]): Additional gRPC
+ metadata.
Raises:
google.api_core.exceptions.MethodNotImplemented: If the server
@@ -245,10 +304,26 @@ def cancel_operation(
"""
# Create the request object.
request = operations_pb2.CancelOperationRequest(name=name)
- self._cancel_operation(request, retry=retry, timeout=timeout)
+
+ # Add routing header
+ metadata = metadata or []
+ metadata.append(gapic_v1.routing_header.to_grpc_metadata({"name": name}))
+
+ self._cancel_operation(
+ request,
+ retry=retry,
+ timeout=timeout,
+ compression=compression,
+ metadata=metadata,
+ )
def delete_operation(
- self, name, retry=gapic_v1.method.DEFAULT, timeout=gapic_v1.method.DEFAULT
+ self,
+ name,
+ retry=gapic_v1.method.DEFAULT,
+ timeout=gapic_v1.method.DEFAULT,
+ compression=gapic_v1.method.DEFAULT,
+ metadata=None,
):
"""Deletes a long-running operation.
@@ -274,6 +349,10 @@ def delete_operation(
                unspecified, the default timeout in the client
configuration is used. If ``None``, then the RPC method will
not time out.
+            compression (grpc.Compression): An element of grpc.Compression,
+                e.g. grpc.Compression.Gzip.
+ metadata (Optional[List[Tuple[str, str]]]): Additional gRPC
+ metadata.
Raises:
google.api_core.exceptions.MethodNotImplemented: If the server
@@ -285,4 +364,15 @@ def delete_operation(
"""
# Create the request object.
request = operations_pb2.DeleteOperationRequest(name=name)
- self._delete_operation(request, retry=retry, timeout=timeout)
+
+ # Add routing header
+ metadata = metadata or []
+ metadata.append(gapic_v1.routing_header.to_grpc_metadata({"name": name}))
+
+ self._delete_operation(
+ request,
+ retry=retry,
+ timeout=timeout,
+ compression=compression,
+ metadata=metadata,
+ )
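
# The synchronous counterpart: `list_operations` takes the parent resource
# name and a filter string positionally and returns a page iterator that
# fetches pages lazily (the channel target is hypothetical).
import grpc

from google.api_core.operations_v1.operations_client import OperationsClient

channel = grpc.insecure_channel("localhost:50051")
client = OperationsClient(channel)
for operation in client.list_operations("operations", ""):
    print(operation.name, operation.done)
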
diff --git a/google/api_core/operations_v1/operations_client_config.py b/google/api_core/operations_v1/operations_client_config.py
index 6cf95753..3ad3548c 100644
--- a/google/api_core/operations_v1/operations_client_config.py
+++ b/google/api_core/operations_v1/operations_client_config.py
@@ -12,8 +12,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-"""gapic configuration for the googe.longrunning.operations client."""
+"""gapic configuration for the google.longrunning.operations client."""
+# DEPRECATED: retry and timeout classes are instantiated directly
config = {
"interfaces": {
"google.longrunning.Operations": {
diff --git a/google/api_core/operations_v1/operations_rest_client_async.py b/google/api_core/operations_v1/operations_rest_client_async.py
new file mode 100644
index 00000000..7ab0cd36
--- /dev/null
+++ b/google/api_core/operations_v1/operations_rest_client_async.py
@@ -0,0 +1,345 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from typing import Optional, Sequence, Tuple, Union
+
+from google.api_core import client_options as client_options_lib # type: ignore
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core.operations_v1 import pagers_async as pagers
+from google.api_core.operations_v1.transports.base import (
+ DEFAULT_CLIENT_INFO,
+ OperationsTransport,
+)
+from google.api_core.operations_v1.abstract_operations_base_client import (
+ AbstractOperationsBaseClient,
+)
+from google.longrunning import operations_pb2
+
+try:
+ from google.auth.aio import credentials as ga_credentials # type: ignore
+except ImportError as e: # pragma: NO COVER
+ raise ImportError(
+ "The `async_rest` extra of `google-api-core` is required to use long-running operations. Install it by running "
+ "`pip install google-api-core[async_rest]`."
+ ) from e
+
+
+class AsyncOperationsRestClient(AbstractOperationsBaseClient):
+ """Manages long-running operations with a REST API service for the asynchronous client.
+
+    When an API method normally takes a long time to complete, it can be
+ designed to return [Operation][google.api_core.operations_v1.Operation] to the
+ client, and the client can use this interface to receive the real
+ response asynchronously by polling the operation resource, or pass
+ the operation resource to another API (such as Google Cloud Pub/Sub
+ API) to receive the response. Any API service that returns
+ long-running operations should implement the ``Operations``
+ interface so developers can have a consistent client experience.
+ """
+
+ def __init__(
+ self,
+ *,
+ credentials: Optional[ga_credentials.Credentials] = None,
+ transport: Union[str, OperationsTransport, None] = None,
+ client_options: Optional[client_options_lib.ClientOptions] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ ) -> None:
+ """Instantiates the operations client.
+
+ Args:
+ credentials (Optional[google.auth.aio.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ transport (Union[str, OperationsTransport]): The
+ transport to use. If set to None, this defaults to 'rest_asyncio'.
+ client_options (google.api_core.client_options.ClientOptions): Custom options for the
+ client. It won't take effect if a ``transport`` instance is provided.
+ (1) The ``api_endpoint`` property can be used to override the
+ default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+ environment variable can also be used to override the endpoint:
+ "always" (always use the default mTLS endpoint), "never" (always
+ use the default regular endpoint) and "auto" (auto switch to the
+ default mTLS endpoint if client certificate is present, this is
+ the default value). However, the ``api_endpoint`` property takes
+ precedence if provided.
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+ is "true", then the ``client_cert_source`` property can be used
+ to provide client certificate for mutual TLS transport. If
+ not provided, the default SSL client certificate will be used if
+ present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+ set, no client certificate will be used.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+ creation failed for any reason.
+ """
+ super().__init__(
+ credentials=credentials, # type: ignore
+ # NOTE: If a transport is not provided, we force the client to use the async
+ # REST transport.
+ transport=transport or "rest_asyncio",
+ client_options=client_options,
+ client_info=client_info,
+ )
+
+ async def get_operation(
+ self,
+ name: str,
+ *,
+ # TODO(https://github.com/googleapis/python-api-core/issues/722): Leverage `retry`
+ # to allow configuring retryable error codes.
+ retry=gapic_v1.method_async.DEFAULT,
+ timeout: Optional[float] = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operations_pb2.Operation:
+ r"""Gets the latest state of a long-running operation.
+ Clients can use this method to poll the operation result
+ at intervals as recommended by the API service.
+
+ Args:
+ name (str):
+ The name of the operation resource.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.longrunning.operations_pb2.Operation:
+                This resource represents a long-running
+                operation that is the result of a
+ network API call.
+
+ """
+
+ request = operations_pb2.GetOperationRequest(name=name)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.get_operation]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata or ()) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = await rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ async def list_operations(
+ self,
+ name: str,
+ filter_: Optional[str] = None,
+ *,
+ page_size: Optional[int] = None,
+ page_token: Optional[str] = None,
+ # TODO(https://github.com/googleapis/python-api-core/issues/722): Leverage `retry`
+ # to allow configuring retryable error codes.
+ retry=gapic_v1.method_async.DEFAULT,
+ timeout: Optional[float] = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> pagers.ListOperationsAsyncPager:
+ r"""Lists operations that match the specified filter in the request.
+ If the server doesn't support this method, it returns
+ ``UNIMPLEMENTED``.
+
+ NOTE: the ``name`` binding allows API services to override the
+ binding to use different resource name schemes, such as
+ ``users/*/operations``. To override the binding, API services
+ can add a binding such as ``"/v1/{name=users/*}/operations"`` to
+ their service configuration. For backwards compatibility, the
+        default name includes the operations collection id; however,
+ overriding users must ensure the name binding is the parent
+ resource, without the operations collection id.
+
+ Args:
+ name (str):
+ The name of the operation's parent
+ resource.
+ filter_ (str):
+ The standard list filter.
+ This corresponds to the ``filter`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+            google.api_core.operations_v1.pagers_async.ListOperationsAsyncPager:
+ The response message for
+ [Operations.ListOperations][google.api_core.operations_v1.Operations.ListOperations].
+
+ Iterating over this object will yield results and
+ resolve additional pages automatically.
+
+ """
+ # Create a protobuf request object.
+ request = operations_pb2.ListOperationsRequest(name=name, filter=filter_)
+ if page_size is not None:
+ request.page_size = page_size
+ if page_token is not None:
+ request.page_token = page_token
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.list_operations]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata or ()) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = await rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # This method is paged; wrap the response in a pager, which provides
+        # an `__aiter__` convenience method.
+ response = pagers.ListOperationsAsyncPager(
+ method=rpc,
+ request=request,
+ response=response,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ async def delete_operation(
+ self,
+ name: str,
+ *,
+ # TODO(https://github.com/googleapis/python-api-core/issues/722): Leverage `retry`
+ # to allow configuring retryable error codes.
+ retry=gapic_v1.method_async.DEFAULT,
+ timeout: Optional[float] = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> None:
+ r"""Deletes a long-running operation. This method indicates that the
+ client is no longer interested in the operation result. It does
+ not cancel the operation. If the server doesn't support this
+ method, it returns ``google.rpc.Code.UNIMPLEMENTED``.
+
+ Args:
+ name (str):
+ The name of the operation resource to
+ be deleted.
+
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ """
+ # Create the request object.
+ request = operations_pb2.DeleteOperationRequest(name=name)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.delete_operation]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata or ()) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ await rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ async def cancel_operation(
+ self,
+ name: Optional[str] = None,
+ *,
+ # TODO(https://github.com/googleapis/python-api-core/issues/722): Leverage `retry`
+ # to allow configuring retryable error codes.
+ retry=gapic_v1.method_async.DEFAULT,
+ timeout: Optional[float] = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> None:
+ r"""Starts asynchronous cancellation on a long-running operation.
+ The server makes a best effort to cancel the operation, but
+ success is not guaranteed. If the server doesn't support this
+ method, it returns ``google.rpc.Code.UNIMPLEMENTED``. Clients
+ can use
+ [Operations.GetOperation][google.api_core.operations_v1.Operations.GetOperation]
+ or other methods to check whether the cancellation succeeded or
+ whether the operation completed despite cancellation. On
+ successful cancellation, the operation is not deleted; instead,
+ it becomes an operation with an
+ [Operation.error][google.api_core.operations_v1.Operation.error] value with
+ a [google.rpc.Status.code][google.rpc.Status.code] of 1,
+ corresponding to ``Code.CANCELLED``.
+
+ Args:
+ name (str):
+ The name of the operation resource to
+ be cancelled.
+
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ """
+ # Create the request object.
+ request = operations_pb2.CancelOperationRequest(name=name)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.cancel_operation]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata or ()) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ await rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
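
# An asyncio sketch for the client above; it requires the `async_rest` extra,
# and `aio_credentials` stands in for a google.auth.aio credentials instance
# obtained elsewhere (hypothetical).
from google.api_core.operations_v1.operations_rest_client_async import (
    AsyncOperationsRestClient,
)

async def cancel_then_check(aio_credentials, name):
    client = AsyncOperationsRestClient(credentials=aio_credentials)
    await client.cancel_operation(name)
    # Cancellation is asynchronous on the server; poll to observe the outcome.
    return await client.get_operation(name)
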
diff --git a/google/api_core/operations_v1/pagers.py b/google/api_core/operations_v1/pagers.py
new file mode 100644
index 00000000..132f1c66
--- /dev/null
+++ b/google/api_core/operations_v1/pagers.py
@@ -0,0 +1,67 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from typing import (
+ Callable,
+ Iterator,
+ Sequence,
+ Tuple,
+)
+
+from google.longrunning import operations_pb2
+from google.api_core.operations_v1.pagers_base import ListOperationsPagerBase
+
+
+class ListOperationsPager(ListOperationsPagerBase):
+ """A pager for iterating through ``list_operations`` requests.
+
+ This class thinly wraps an initial
+ :class:`google.longrunning.operations_pb2.ListOperationsResponse` object, and
+ provides an ``__iter__`` method to iterate through its
+ ``operations`` field.
+
+ If there are more pages, the ``__iter__`` method will make additional
+ ``ListOperations`` requests and continue to iterate
+ through the ``operations`` field on the
+ corresponding responses.
+
+ All the usual :class:`google.longrunning.operations_pb2.ListOperationsResponse`
+ attributes are available on the pager. If multiple requests are made, only
+ the most recent response is retained, and thus used for attribute lookup.
+ """
+
+ def __init__(
+ self,
+ method: Callable[..., operations_pb2.ListOperationsResponse],
+ request: operations_pb2.ListOperationsRequest,
+ response: operations_pb2.ListOperationsResponse,
+ *,
+ metadata: Sequence[Tuple[str, str]] = ()
+ ):
+ super().__init__(
+ method=method, request=request, response=response, metadata=metadata
+ )
+
+ @property
+ def pages(self) -> Iterator[operations_pb2.ListOperationsResponse]:
+ yield self._response
+ while self._response.next_page_token:
+ self._request.page_token = self._response.next_page_token
+ self._response = self._method(self._request, metadata=self._metadata)
+ yield self._response
+
+ def __iter__(self) -> Iterator[operations_pb2.Operation]:
+ for page in self.pages:
+ yield from page.operations
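
# Usage sketch: this pager is what the REST client's list_operations returns;
# `client` is assumed to be an already-constructed operations client.
def print_all_operations(client):
    pager = client.list_operations(name="operations")
    for operation in pager:  # __iter__ walks `pages`, fetching more as needed
        print(operation.name)
    # Response attributes such as `pager.next_page_token` are forwarded to the
    # most recent ListOperationsResponse by the base class.
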
diff --git a/google/api_core/operations_v1/pagers_async.py b/google/api_core/operations_v1/pagers_async.py
new file mode 100644
index 00000000..e2909dd5
--- /dev/null
+++ b/google/api_core/operations_v1/pagers_async.py
@@ -0,0 +1,71 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from typing import (
+ Callable,
+ AsyncIterator,
+ Sequence,
+ Tuple,
+)
+
+from google.longrunning import operations_pb2
+from google.api_core.operations_v1.pagers_base import ListOperationsPagerBase
+
+
+class ListOperationsAsyncPager(ListOperationsPagerBase):
+ """A pager for iterating through ``list_operations`` requests.
+
+ This class thinly wraps an initial
+ :class:`google.longrunning.operations_pb2.ListOperationsResponse` object, and
+    provides an ``__aiter__`` method to iterate through its
+ ``operations`` field.
+
+    If there are more pages, the ``__aiter__`` method will make additional
+ ``ListOperations`` requests and continue to iterate
+ through the ``operations`` field on the
+ corresponding responses.
+
+ All the usual :class:`google.longrunning.operations_pb2.ListOperationsResponse`
+ attributes are available on the pager. If multiple requests are made, only
+ the most recent response is retained, and thus used for attribute lookup.
+ """
+
+ def __init__(
+ self,
+ method: Callable[..., operations_pb2.ListOperationsResponse],
+ request: operations_pb2.ListOperationsRequest,
+ response: operations_pb2.ListOperationsResponse,
+ *,
+ metadata: Sequence[Tuple[str, str]] = ()
+ ):
+ super().__init__(
+ method=method, request=request, response=response, metadata=metadata
+ )
+
+ @property
+ async def pages(self) -> AsyncIterator[operations_pb2.ListOperationsResponse]:
+ yield self._response
+ while self._response.next_page_token:
+ self._request.page_token = self._response.next_page_token
+ self._response = await self._method(self._request, metadata=self._metadata)
+ yield self._response
+
+ def __aiter__(self) -> AsyncIterator[operations_pb2.Operation]:
+ async def async_generator():
+ async for page in self.pages:
+ for operation in page.operations:
+ yield operation
+
+ return async_generator()
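
# The async analogue: __aiter__ above drives `pages`, so callers consume items
# with ``async for`` (`client` is assumed to be an AsyncOperationsRestClient).
async def print_all_operations(client):
    pager = await client.list_operations(name="operations")
    async for operation in pager:
        print(operation.name)
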
diff --git a/google/api_core/operations_v1/pagers_base.py b/google/api_core/operations_v1/pagers_base.py
new file mode 100644
index 00000000..24caf74f
--- /dev/null
+++ b/google/api_core/operations_v1/pagers_base.py
@@ -0,0 +1,73 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from typing import (
+ Any,
+ Callable,
+ Sequence,
+ Tuple,
+)
+
+from google.longrunning import operations_pb2
+
+
+class ListOperationsPagerBase:
+ """A pager for iterating through ``list_operations`` requests.
+
+ This class thinly wraps an initial
+ :class:`google.longrunning.operations_pb2.ListOperationsResponse` object, and
+ provides an ``__iter__`` method to iterate through its
+ ``operations`` field.
+
+ If there are more pages, the ``__iter__`` method will make additional
+ ``ListOperations`` requests and continue to iterate
+ through the ``operations`` field on the
+ corresponding responses.
+
+ All the usual :class:`google.longrunning.operations_pb2.ListOperationsResponse`
+ attributes are available on the pager. If multiple requests are made, only
+ the most recent response is retained, and thus used for attribute lookup.
+ """
+
+ def __init__(
+ self,
+ method: Callable[..., operations_pb2.ListOperationsResponse],
+ request: operations_pb2.ListOperationsRequest,
+ response: operations_pb2.ListOperationsResponse,
+ *,
+ metadata: Sequence[Tuple[str, str]] = ()
+ ):
+ """Instantiate the pager.
+
+ Args:
+ method (Callable): The method that was originally called, and
+ which instantiated this pager.
+ request (google.longrunning.operations_pb2.ListOperationsRequest):
+ The initial request object.
+ response (google.longrunning.operations_pb2.ListOperationsResponse):
+ The initial response object.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ """
+ self._method = method
+ self._request = request
+ self._response = response
+ self._metadata = metadata
+
+ def __getattr__(self, name: str) -> Any:
+ return getattr(self._response, name)
+
+ def __repr__(self) -> str:
+ return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
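
# A runnable sketch of the __getattr__ delegation above: attributes not found
# on the pager are read from the most recent response object.
from google.longrunning import operations_pb2

from google.api_core.operations_v1.pagers_base import ListOperationsPagerBase

response = operations_pb2.ListOperationsResponse(next_page_token="token-1")
pager = ListOperationsPagerBase(
    method=lambda request, **kwargs: response,  # stand-in for the RPC method
    request=operations_pb2.ListOperationsRequest(name="operations"),
    response=response,
)
print(pager.next_page_token)  # "token-1", delegated to the response
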
diff --git a/google/api_core/operations_v1/transports/__init__.py b/google/api_core/operations_v1/transports/__init__.py
new file mode 100644
index 00000000..8c24ce6e
--- /dev/null
+++ b/google/api_core/operations_v1/transports/__init__.py
@@ -0,0 +1,39 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from collections import OrderedDict
+from typing import cast, Dict, Tuple
+
+from .base import OperationsTransport
+from .rest import OperationsRestTransport
+
+# Compile a registry of transports.
+_transport_registry: Dict[str, OperationsTransport] = OrderedDict()
+_transport_registry["rest"] = cast(OperationsTransport, OperationsRestTransport)
+
+__all__: Tuple[str, ...] = ("OperationsTransport", "OperationsRestTransport")
+
+try:
+ from .rest_asyncio import AsyncOperationsRestTransport
+
+ __all__ += ("AsyncOperationsRestTransport",)
+ _transport_registry["rest_asyncio"] = cast(
+ OperationsTransport, AsyncOperationsRestTransport
+ )
+except ImportError:
+ # This import requires the `async_rest` extra.
+ # Don't raise an exception if `AsyncOperationsRestTransport` cannot be imported
+ # as other transports are still available.
+ pass
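
# A sketch of resolving a transport class by name through the registry built
# above (`_transport_registry` is module-private; shown for illustration only).
from google.api_core.operations_v1 import transports

rest_transport_cls = transports._transport_registry["rest"]
print(rest_transport_cls.__name__)  # OperationsRestTransport
if "rest_asyncio" in transports._transport_registry:
    print("async REST transport available (async_rest extra installed)")
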
diff --git a/google/api_core/operations_v1/transports/base.py b/google/api_core/operations_v1/transports/base.py
new file mode 100644
index 00000000..71764c1e
--- /dev/null
+++ b/google/api_core/operations_v1/transports/base.py
@@ -0,0 +1,294 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import abc
+import re
+from typing import Awaitable, Callable, Optional, Sequence, Union
+
+import google.api_core # type: ignore
+from google.api_core import exceptions as core_exceptions # type: ignore
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import retry as retries # type: ignore
+from google.api_core import version
+import google.auth # type: ignore
+from google.auth import credentials as ga_credentials # type: ignore
+from google.longrunning import operations_pb2
+from google.oauth2 import service_account # type: ignore
+import google.protobuf
+from google.protobuf import empty_pb2, json_format # type: ignore
+from grpc import Compression
+
+
+PROTOBUF_VERSION = google.protobuf.__version__
+
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+ gapic_version=version.__version__,
+)
+
+
+class OperationsTransport(abc.ABC):
+ """Abstract transport class for Operations."""
+
+ AUTH_SCOPES = ()
+
+ DEFAULT_HOST: str = "longrunning.googleapis.com"
+
+ def __init__(
+ self,
+ *,
+ host: str = DEFAULT_HOST,
+ # TODO(https://github.com/googleapis/python-api-core/issues/709): update type hint for credentials to include `google.auth.aio.Credentials`.
+ credentials: Optional[ga_credentials.Credentials] = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ quota_project_id: Optional[str] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ always_use_jwt_access: Optional[bool] = False,
+ url_scheme="https",
+ **kwargs,
+ ) -> None:
+ """Instantiate the transport.
+
+ Args:
+ host (Optional[str]):
+ The hostname to connect to.
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is mutually exclusive with credentials.
+
+ .. warning::
+ Important: If you accept a credential configuration (credential JSON/File/Stream)
+ from an external source for authentication to Google Cloud Platform, you must
+ validate it before providing it to any Google API or client library. Providing an
+ unvalidated credential configuration to Google APIs or libraries can compromise
+ the security of your systems and data. For more information, refer to
+ `Validate credential configurations from external sources`_.
+
+ .. _Validate credential configurations from external sources:
+
+ https://cloud.google.com/docs/authentication/external/externally-sourced-credentials
+ scopes (Optional[Sequence[str]]): A list of scopes.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+ be used for service account credentials.
+ url_scheme: the protocol scheme for the API endpoint. Normally
+ "https", but for testing or local servers,
+ "http" can be specified.
+ """
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+ if maybe_url_match is None:
+ raise ValueError(
+ f"Unexpected hostname structure: {host}"
+ ) # pragma: NO COVER
+
+ url_match_items = maybe_url_match.groupdict()
+
+ host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+ # Save the hostname. Default to port 443 (HTTPS) if none is specified.
+ if ":" not in host:
+ host += ":443" # pragma: NO COVER
+ self._host = host
+
+ scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}
+
+ # Save the scopes.
+ self._scopes = scopes
+
+ # If no credentials are provided, then determine the appropriate
+ # defaults.
+ if credentials and credentials_file:
+ raise core_exceptions.DuplicateCredentialArgs(
+ "'credentials_file' and 'credentials' are mutually exclusive"
+ )
+
+ if credentials_file is not None:
+ credentials, _ = google.auth.load_credentials_from_file(
+ credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
+ )
+
+ elif credentials is None:
+ credentials, _ = google.auth.default(
+ **scopes_kwargs, quota_project_id=quota_project_id
+ )
+
+ # If the credentials are service account credentials, then always try to use self signed JWT.
+ if (
+ always_use_jwt_access
+ and isinstance(credentials, service_account.Credentials)
+ and hasattr(service_account.Credentials, "with_always_use_jwt_access")
+ ):
+ credentials = credentials.with_always_use_jwt_access(True)
+
+ # Save the credentials.
+ self._credentials = credentials
+
+ def _prep_wrapped_messages(self, client_info):
+ # Precompute the wrapped methods.
+ self._wrapped_methods = {
+ self.list_operations: gapic_v1.method.wrap_method(
+ self.list_operations,
+ default_retry=retries.Retry(
+ initial=0.5,
+ maximum=10.0,
+ multiplier=2.0,
+ predicate=retries.if_exception_type(
+ core_exceptions.ServiceUnavailable,
+ ),
+ deadline=10.0,
+ ),
+ default_timeout=10.0,
+ default_compression=Compression.NoCompression,
+ client_info=client_info,
+ ),
+ self.get_operation: gapic_v1.method.wrap_method(
+ self.get_operation,
+ default_retry=retries.Retry(
+ initial=0.5,
+ maximum=10.0,
+ multiplier=2.0,
+ predicate=retries.if_exception_type(
+ core_exceptions.ServiceUnavailable,
+ ),
+ deadline=10.0,
+ ),
+ default_timeout=10.0,
+ default_compression=Compression.NoCompression,
+ client_info=client_info,
+ ),
+ self.delete_operation: gapic_v1.method.wrap_method(
+ self.delete_operation,
+ default_retry=retries.Retry(
+ initial=0.5,
+ maximum=10.0,
+ multiplier=2.0,
+ predicate=retries.if_exception_type(
+ core_exceptions.ServiceUnavailable,
+ ),
+ deadline=10.0,
+ ),
+ default_timeout=10.0,
+ default_compression=Compression.NoCompression,
+ client_info=client_info,
+ ),
+ self.cancel_operation: gapic_v1.method.wrap_method(
+ self.cancel_operation,
+ default_retry=retries.Retry(
+ initial=0.5,
+ maximum=10.0,
+ multiplier=2.0,
+ predicate=retries.if_exception_type(
+ core_exceptions.ServiceUnavailable,
+ ),
+ deadline=10.0,
+ ),
+ default_timeout=10.0,
+ default_compression=Compression.NoCompression,
+ client_info=client_info,
+ ),
+ }
+
+ def close(self):
+ """Closes resources associated with the transport.
+
+ .. warning::
+ Only call this method if the transport is NOT shared
+ with other clients - this may cause errors in other clients!
+ """
+ raise NotImplementedError()
+
+ def _convert_protobuf_message_to_dict(
+ self, message: google.protobuf.message.Message
+ ):
+ r"""Converts protobuf message to a dictionary.
+
+ When the dictionary is encoded to JSON, it conforms to proto3 JSON spec.
+
+ Args:
+ message(google.protobuf.message.Message): The protocol buffers message
+ instance to serialize.
+
+ Returns:
+ A dict representation of the protocol buffer message.
+ """
+ # TODO(https://github.com/googleapis/python-api-core/issues/643): For backwards compatibility
+        # with protobuf 3.x and 4.x. Remove once support for protobuf 3.x and 4.x is dropped.
+ if PROTOBUF_VERSION[0:2] in ["3.", "4."]:
+ result = json_format.MessageToDict(
+ message,
+ preserving_proto_field_name=True,
+ including_default_value_fields=True, # type: ignore # backward compatibility
+ )
+ else:
+ result = json_format.MessageToDict(
+ message,
+ preserving_proto_field_name=True,
+ always_print_fields_with_no_presence=True,
+ )
+
+ return result
+
+ @property
+ def list_operations(
+ self,
+ ) -> Callable[
+ [operations_pb2.ListOperationsRequest],
+ Union[
+ operations_pb2.ListOperationsResponse,
+ Awaitable[operations_pb2.ListOperationsResponse],
+ ],
+ ]:
+ raise NotImplementedError()
+
+ @property
+ def get_operation(
+ self,
+ ) -> Callable[
+ [operations_pb2.GetOperationRequest],
+ Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
+ ]:
+ raise NotImplementedError()
+
+ @property
+ def delete_operation(
+ self,
+ ) -> Callable[
+ [operations_pb2.DeleteOperationRequest],
+ Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]],
+ ]:
+ raise NotImplementedError()
+
+ @property
+ def cancel_operation(
+ self,
+ ) -> Callable[
+ [operations_pb2.CancelOperationRequest],
+ Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]],
+ ]:
+ raise NotImplementedError()
+
+
+__all__ = ("OperationsTransport",)
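
# An illustration of the proto-to-dict conversion performed by
# _convert_protobuf_message_to_dict above, using only the arguments that are
# valid across protobuf releases (the default-fields flag differs by version).
from google.longrunning import operations_pb2
from google.protobuf import json_format

op = operations_pb2.Operation(name="operations/sample", done=True)
print(json_format.MessageToDict(op, preserving_proto_field_name=True))
# -> {'name': 'operations/sample', 'done': True}
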
diff --git a/google/api_core/operations_v1/transports/rest.py b/google/api_core/operations_v1/transports/rest.py
new file mode 100644
index 00000000..0705c518
--- /dev/null
+++ b/google/api_core/operations_v1/transports/rest.py
@@ -0,0 +1,485 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from typing import Callable, Dict, Optional, Sequence, Tuple, Union
+
+from requests import __version__ as requests_version
+
+from google.api_core import exceptions as core_exceptions # type: ignore
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import path_template # type: ignore
+from google.api_core import rest_helpers # type: ignore
+from google.api_core import retry as retries # type: ignore
+from google.auth import credentials as ga_credentials # type: ignore
+from google.auth.transport.requests import AuthorizedSession # type: ignore
+from google.longrunning import operations_pb2 # type: ignore
+from google.protobuf import empty_pb2 # type: ignore
+from google.protobuf import json_format # type: ignore
+import google.protobuf
+
+import grpc
+from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, OperationsTransport
+
+PROTOBUF_VERSION = google.protobuf.__version__
+
+OptionalRetry = Union[retries.Retry, object]
+
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+ gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version,
+ grpc_version=None,
+ rest_version=f"requests@{requests_version}",
+)
+
+
+class OperationsRestTransport(OperationsTransport):
+ """REST backend transport for Operations.
+
+ Manages long-running operations with an API service.
+
+    When an API method normally takes a long time to complete, it can be
+ designed to return [Operation][google.api_core.operations_v1.Operation] to the
+ client, and the client can use this interface to receive the real
+ response asynchronously by polling the operation resource, or pass
+ the operation resource to another API (such as Google Cloud Pub/Sub
+ API) to receive the response. Any API service that returns
+ long-running operations should implement the ``Operations``
+ interface so developers can have a consistent client experience.
+
+ This class defines the same methods as the primary client, so the
+ primary client can load the underlying transport implementation
+ and call it.
+
+    It sends JSON representations of protocol buffers over HTTP/1.1.
+ """
+
+ def __init__(
+ self,
+ *,
+ host: str = "longrunning.googleapis.com",
+ credentials: Optional[ga_credentials.Credentials] = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+ quota_project_id: Optional[str] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ always_use_jwt_access: Optional[bool] = False,
+ url_scheme: str = "https",
+ http_options: Optional[Dict] = None,
+ path_prefix: str = "v1",
+ ) -> None:
+ """Instantiate the transport.
+
+ Args:
+ host (Optional[str]):
+ The hostname to connect to.
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is ignored if ``channel`` is provided.
+
+ .. warning::
+ Important: If you accept a credential configuration (credential JSON/File/Stream)
+ from an external source for authentication to Google Cloud Platform, you must
+ validate it before providing it to any Google API or client library. Providing an
+ unvalidated credential configuration to Google APIs or libraries can compromise
+ the security of your systems and data. For more information, refer to
+ `Validate credential configurations from external sources`_.
+
+ .. _Validate credential configurations from external sources:
+
+ https://cloud.google.com/docs/authentication/external/externally-sourced-credentials
+        scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+ ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
+ certificate to configure mutual TLS HTTP channel. It is ignored
+ if ``channel`` is provided.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+        always_use_jwt_access (Optional[bool]): Whether a self-signed JWT should
+ be used for service account credentials.
+ url_scheme: the protocol scheme for the API endpoint. Normally
+ "https", but for testing or local servers,
+ "http" can be specified.
+ http_options: a dictionary of http_options for transcoding, to override
+ the defaults from operations.proto. Each method has an entry
+ with the corresponding http rules as value.
+ path_prefix: path prefix (usually represents API version). Set to
+ "v1" by default.
+
+ """
+ # Run the base constructor
+ # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+ # credentials object
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ client_info=client_info,
+ always_use_jwt_access=always_use_jwt_access,
+ )
+ self._session = AuthorizedSession(
+ self._credentials, default_host=self.DEFAULT_HOST
+ )
+ if client_cert_source_for_mtls:
+ self._session.configure_mtls_channel(client_cert_source_for_mtls)
+ # TODO(https://github.com/googleapis/python-api-core/issues/720): Add wrap logic directly to the property methods for callables.
+ self._prep_wrapped_messages(client_info)
+ self._http_options = http_options or {}
+ self._path_prefix = path_prefix
+
+ def _list_operations(
+ self,
+ request: operations_pb2.ListOperationsRequest,
+ *,
+ # TODO(https://github.com/googleapis/python-api-core/issues/723): Leverage `retry`
+ # to allow configuring retryable error codes.
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Optional[float] = None,
+ compression: Optional[grpc.Compression] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operations_pb2.ListOperationsResponse:
+ r"""Call the list operations method over HTTP.
+
+ Args:
+ request (~.operations_pb2.ListOperationsRequest):
+ The request object. The request message for
+ [Operations.ListOperations][google.api_core.operations_v1.Operations.ListOperations].
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.operations_pb2.ListOperationsResponse:
+ The response message for
+ [Operations.ListOperations][google.api_core.operations_v1.Operations.ListOperations].
+
+ """
+
+ http_options = [
+ {
+ "method": "get",
+ "uri": "/{}/{{name=**}}/operations".format(self._path_prefix),
+ },
+ ]
+ if "google.longrunning.Operations.ListOperations" in self._http_options:
+ http_options = self._http_options[
+ "google.longrunning.Operations.ListOperations"
+ ]
+
+ request_kwargs = self._convert_protobuf_message_to_dict(request)
+ transcoded_request = path_template.transcode(http_options, **request_kwargs)
+
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+
+ # Jsonify the query params
+ query_params_request = operations_pb2.ListOperationsRequest()
+ json_format.ParseDict(transcoded_request["query_params"], query_params_request)
+ query_params = json_format.MessageToDict(
+ query_params_request,
+ preserving_proto_field_name=False,
+ use_integers_for_enums=False,
+ )
+
+ # Send the request
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+        # TODO(https://github.com/googleapis/python-api-core/issues/721): Update incorrect use of `uri` variable name.
+ response = getattr(self._session, method)(
+ "{host}{uri}".format(host=self._host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params),
+ )
+
+ # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+ # subclass.
+ if response.status_code >= 400:
+ raise core_exceptions.from_http_response(response)
+
+ # Return the response
+ api_response = operations_pb2.ListOperationsResponse()
+ json_format.Parse(response.content, api_response, ignore_unknown_fields=False)
+ return api_response
+
+ def _get_operation(
+ self,
+ request: operations_pb2.GetOperationRequest,
+ *,
+ # TODO(https://github.com/googleapis/python-api-core/issues/723): Leverage `retry`
+ # to allow configuring retryable error codes.
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Optional[float] = None,
+ compression: Optional[grpc.Compression] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operations_pb2.Operation:
+ r"""Call the get operation method over HTTP.
+
+ Args:
+ request (~.operations_pb2.GetOperationRequest):
+ The request object. The request message for
+ [Operations.GetOperation][google.api_core.operations_v1.Operations.GetOperation].
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.operations_pb2.Operation:
+                This resource represents a long-running
+                operation that is the result of a network
+                API call.
+
+ """
+
+ http_options = [
+ {
+ "method": "get",
+ "uri": "/{}/{{name=**/operations/*}}".format(self._path_prefix),
+ },
+ ]
+ if "google.longrunning.Operations.GetOperation" in self._http_options:
+ http_options = self._http_options[
+ "google.longrunning.Operations.GetOperation"
+ ]
+
+ request_kwargs = self._convert_protobuf_message_to_dict(request)
+ transcoded_request = path_template.transcode(http_options, **request_kwargs)
+
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+
+ # Jsonify the query params
+ query_params_request = operations_pb2.GetOperationRequest()
+ json_format.ParseDict(transcoded_request["query_params"], query_params_request)
+ query_params = json_format.MessageToDict(
+ query_params_request,
+ preserving_proto_field_name=False,
+ use_integers_for_enums=False,
+ )
+
+ # Send the request
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+        # TODO(https://github.com/googleapis/python-api-core/issues/721): Update incorrect use of `uri` variable name.
+ response = getattr(self._session, method)(
+ "{host}{uri}".format(host=self._host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params),
+ )
+
+ # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+ # subclass.
+ if response.status_code >= 400:
+ raise core_exceptions.from_http_response(response)
+
+ # Return the response
+ api_response = operations_pb2.Operation()
+ json_format.Parse(response.content, api_response, ignore_unknown_fields=False)
+ return api_response
+
+ def _delete_operation(
+ self,
+ request: operations_pb2.DeleteOperationRequest,
+ *,
+ # TODO(https://github.com/googleapis/python-api-core/issues/723): Leverage `retry`
+ # to allow configuring retryable error codes.
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Optional[float] = None,
+ compression: Optional[grpc.Compression] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> empty_pb2.Empty:
+ r"""Call the delete operation method over HTTP.
+
+ Args:
+ request (~.operations_pb2.DeleteOperationRequest):
+ The request object. The request message for
+ [Operations.DeleteOperation][google.api_core.operations_v1.Operations.DeleteOperation].
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ """
+
+ http_options = [
+ {
+ "method": "delete",
+ "uri": "/{}/{{name=**/operations/*}}".format(self._path_prefix),
+ },
+ ]
+ if "google.longrunning.Operations.DeleteOperation" in self._http_options:
+ http_options = self._http_options[
+ "google.longrunning.Operations.DeleteOperation"
+ ]
+
+ request_kwargs = self._convert_protobuf_message_to_dict(request)
+ transcoded_request = path_template.transcode(http_options, **request_kwargs)
+
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+
+ # Jsonify the query params
+ query_params_request = operations_pb2.DeleteOperationRequest()
+ json_format.ParseDict(transcoded_request["query_params"], query_params_request)
+ query_params = json_format.MessageToDict(
+ query_params_request,
+ preserving_proto_field_name=False,
+ use_integers_for_enums=False,
+ )
+
+ # Send the request
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+        # TODO(https://github.com/googleapis/python-api-core/issues/721): Update incorrect use of `uri` variable name.
+ response = getattr(self._session, method)(
+ "{host}{uri}".format(host=self._host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params),
+ )
+
+ # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+ # subclass.
+ if response.status_code >= 400:
+ raise core_exceptions.from_http_response(response)
+
+ return empty_pb2.Empty()
+
+ def _cancel_operation(
+ self,
+ request: operations_pb2.CancelOperationRequest,
+ *,
+ # TODO(https://github.com/googleapis/python-api-core/issues/723): Leverage `retry`
+ # to allow configuring retryable error codes.
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Optional[float] = None,
+ compression: Optional[grpc.Compression] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> empty_pb2.Empty:
+ r"""Call the cancel operation method over HTTP.
+
+ Args:
+ request (~.operations_pb2.CancelOperationRequest):
+ The request object. The request message for
+ [Operations.CancelOperation][google.api_core.operations_v1.Operations.CancelOperation].
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ """
+
+ http_options = [
+ {
+ "method": "post",
+ "uri": "/{}/{{name=**/operations/*}}:cancel".format(self._path_prefix),
+ "body": "*",
+ },
+ ]
+ if "google.longrunning.Operations.CancelOperation" in self._http_options:
+ http_options = self._http_options[
+ "google.longrunning.Operations.CancelOperation"
+ ]
+
+ request_kwargs = self._convert_protobuf_message_to_dict(request)
+ transcoded_request = path_template.transcode(http_options, **request_kwargs)
+
+ # Jsonify the request body
+ body_request = operations_pb2.CancelOperationRequest()
+ json_format.ParseDict(transcoded_request["body"], body_request)
+ body = json_format.MessageToDict(
+ body_request,
+ preserving_proto_field_name=False,
+ use_integers_for_enums=False,
+ )
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+
+ # Jsonify the query params
+ query_params_request = operations_pb2.CancelOperationRequest()
+ json_format.ParseDict(transcoded_request["query_params"], query_params_request)
+ query_params = json_format.MessageToDict(
+ query_params_request,
+ preserving_proto_field_name=False,
+ use_integers_for_enums=False,
+ )
+
+ # Send the request
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+        # TODO(https://github.com/googleapis/python-api-core/issues/721): Update incorrect use of `uri` variable name.
+ response = getattr(self._session, method)(
+ "{host}{uri}".format(host=self._host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params),
+ data=body,
+ )
+
+ # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+ # subclass.
+ if response.status_code >= 400:
+ raise core_exceptions.from_http_response(response)
+
+ return empty_pb2.Empty()
+
+ @property
+ def list_operations(
+ self,
+ ) -> Callable[
+ [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse
+ ]:
+ return self._list_operations
+
+ @property
+ def get_operation(
+ self,
+ ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]:
+ return self._get_operation
+
+ @property
+ def delete_operation(
+ self,
+ ) -> Callable[[operations_pb2.DeleteOperationRequest], empty_pb2.Empty]:
+ return self._delete_operation
+
+ @property
+ def cancel_operation(
+ self,
+ ) -> Callable[[operations_pb2.CancelOperationRequest], empty_pb2.Empty]:
+ return self._cancel_operation
+
+
+__all__ = ("OperationsRestTransport",)
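+
+# A hypothetical usage sketch (not part of this module): polling a
+# long-running operation over REST. The operation name is a made-up
+# placeholder, and anonymous credentials stand in for real ones.
+#
+#     from google.auth import credentials as ga_credentials
+#
+#     transport = OperationsRestTransport(
+#         credentials=ga_credentials.AnonymousCredentials(),
+#     )
+#     request = operations_pb2.GetOperationRequest(name="operations/sample-op")
+#     operation = transport.get_operation(request)
+#     print(operation.done)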
diff --git a/google/api_core/operations_v1/transports/rest_asyncio.py b/google/api_core/operations_v1/transports/rest_asyncio.py
new file mode 100644
index 00000000..71c20eb8
--- /dev/null
+++ b/google/api_core/operations_v1/transports/rest_asyncio.py
@@ -0,0 +1,560 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import json
+from typing import Any, Callable, Coroutine, Dict, Optional, Sequence, Tuple
+
+from google.auth import __version__ as auth_version
+
+try:
+ from google.auth.aio.transport.sessions import AsyncAuthorizedSession # type: ignore
+except ImportError as e: # pragma: NO COVER
+ raise ImportError(
+ "The `async_rest` extra of `google-api-core` is required to use long-running operations. Install it by running "
+ "`pip install google-api-core[async_rest]`."
+ ) from e
+
+from google.api_core import exceptions as core_exceptions # type: ignore
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import path_template # type: ignore
+from google.api_core import rest_helpers # type: ignore
+from google.api_core import retry_async as retries_async # type: ignore
+from google.auth.aio import credentials as ga_credentials_async # type: ignore
+from google.longrunning import operations_pb2 # type: ignore
+from google.protobuf import empty_pb2 # type: ignore
+from google.protobuf import json_format # type: ignore
+
+from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, OperationsTransport
+
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+ gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version,
+ grpc_version=None,
+ rest_version=f"google-auth@{auth_version}",
+)
+
+
+class AsyncOperationsRestTransport(OperationsTransport):
+ """Asynchronous REST backend transport for Operations.
+
+ Manages async long-running operations with an API service.
+
+    When an API method normally takes a long time to complete, it can be
+ designed to return [Operation][google.api_core.operations_v1.Operation] to the
+ client, and the client can use this interface to receive the real
+ response asynchronously by polling the operation resource, or pass
+ the operation resource to another API (such as Google Cloud Pub/Sub
+ API) to receive the response. Any API service that returns
+ long-running operations should implement the ``Operations``
+ interface so developers can have a consistent client experience.
+
+ This class defines the same methods as the primary client, so the
+ primary client can load the underlying transport implementation
+ and call it.
+
+    It sends JSON representations of protocol buffers over HTTP/1.1.
+ """
+
+ def __init__(
+ self,
+ *,
+ host: str = "longrunning.googleapis.com",
+ credentials: Optional[ga_credentials_async.Credentials] = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+ quota_project_id: Optional[str] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ always_use_jwt_access: Optional[bool] = False,
+ url_scheme: str = "https",
+ http_options: Optional[Dict] = None,
+ path_prefix: str = "v1",
+ # TODO(https://github.com/googleapis/python-api-core/issues/715): Add docstring for `credentials_file` to async REST transport.
+ # TODO(https://github.com/googleapis/python-api-core/issues/716): Add docstring for `scopes` to async REST transport.
+ # TODO(https://github.com/googleapis/python-api-core/issues/717): Add docstring for `quota_project_id` to async REST transport.
+ # TODO(https://github.com/googleapis/python-api-core/issues/718): Add docstring for `client_cert_source` to async REST transport.
+ ) -> None:
+ """Instantiate the transport.
+
+ Args:
+ host (Optional[str]):
+ The hostname to connect to.
+ credentials (Optional[google.auth.aio.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+        always_use_jwt_access (Optional[bool]): Whether a self-signed JWT should
+ be used for service account credentials.
+ url_scheme: the protocol scheme for the API endpoint. Normally
+ "https", but for testing or local servers,
+ "http" can be specified.
+ http_options: a dictionary of http_options for transcoding, to override
+ the defaults from operations.proto. Each method has an entry
+ with the corresponding http rules as value.
+ path_prefix: path prefix (usually represents API version). Set to
+ "v1" by default.
+
+ """
+ unsupported_params = {
+ # TODO(https://github.com/googleapis/python-api-core/issues/715): Add support for `credentials_file` to async REST transport.
+ "google.api_core.client_options.ClientOptions.credentials_file": credentials_file,
+ # TODO(https://github.com/googleapis/python-api-core/issues/716): Add support for `scopes` to async REST transport.
+ "google.api_core.client_options.ClientOptions.scopes": scopes,
+ # TODO(https://github.com/googleapis/python-api-core/issues/717): Add support for `quota_project_id` to async REST transport.
+ "google.api_core.client_options.ClientOptions.quota_project_id": quota_project_id,
+ # TODO(https://github.com/googleapis/python-api-core/issues/718): Add support for `client_cert_source` to async REST transport.
+ "google.api_core.client_options.ClientOptions.client_cert_source": client_cert_source_for_mtls,
+ }
+ provided_unsupported_params = [
+ name for name, value in unsupported_params.items() if value is not None
+ ]
+ if provided_unsupported_params:
+ raise core_exceptions.AsyncRestUnsupportedParameterError(
+ f"The following provided parameters are not supported for `transport=rest_asyncio`: {', '.join(provided_unsupported_params)}"
+ )
+
+ super().__init__(
+ host=host,
+ # TODO(https://github.com/googleapis/python-api-core/issues/709): Remove `type: ignore` when the linked issue is resolved.
+ credentials=credentials, # type: ignore
+ client_info=client_info,
+ # TODO(https://github.com/googleapis/python-api-core/issues/725): Set always_use_jwt_access token when supported.
+ always_use_jwt_access=False,
+ )
+ # TODO(https://github.com/googleapis/python-api-core/issues/708): add support for
+ # `default_host` in AsyncAuthorizedSession for feature parity with the synchronous
+ # code.
+ # TODO(https://github.com/googleapis/python-api-core/issues/709): Remove `type: ignore` when the linked issue is resolved.
+ self._session = AsyncAuthorizedSession(self._credentials) # type: ignore
+ # TODO(https://github.com/googleapis/python-api-core/issues/720): Add wrap logic directly to the property methods for callables.
+ self._prep_wrapped_messages(client_info)
+ self._http_options = http_options or {}
+ self._path_prefix = path_prefix
+
+ def _prep_wrapped_messages(self, client_info):
+ # Precompute the wrapped methods.
+ self._wrapped_methods = {
+ self.list_operations: gapic_v1.method_async.wrap_method(
+ self.list_operations,
+ default_retry=retries_async.AsyncRetry(
+ initial=0.5,
+ maximum=10.0,
+ multiplier=2.0,
+ predicate=retries_async.if_exception_type(
+ core_exceptions.ServiceUnavailable,
+ ),
+ deadline=10.0,
+ ),
+ default_timeout=10.0,
+ client_info=client_info,
+ kind="rest_asyncio",
+ ),
+ self.get_operation: gapic_v1.method_async.wrap_method(
+ self.get_operation,
+ default_retry=retries_async.AsyncRetry(
+ initial=0.5,
+ maximum=10.0,
+ multiplier=2.0,
+ predicate=retries_async.if_exception_type(
+ core_exceptions.ServiceUnavailable,
+ ),
+ deadline=10.0,
+ ),
+ default_timeout=10.0,
+ client_info=client_info,
+ kind="rest_asyncio",
+ ),
+ self.delete_operation: gapic_v1.method_async.wrap_method(
+ self.delete_operation,
+ default_retry=retries_async.AsyncRetry(
+ initial=0.5,
+ maximum=10.0,
+ multiplier=2.0,
+ predicate=retries_async.if_exception_type(
+ core_exceptions.ServiceUnavailable,
+ ),
+ deadline=10.0,
+ ),
+ default_timeout=10.0,
+ client_info=client_info,
+ kind="rest_asyncio",
+ ),
+ self.cancel_operation: gapic_v1.method_async.wrap_method(
+ self.cancel_operation,
+ default_retry=retries_async.AsyncRetry(
+ initial=0.5,
+ maximum=10.0,
+ multiplier=2.0,
+ predicate=retries_async.if_exception_type(
+ core_exceptions.ServiceUnavailable,
+ ),
+ deadline=10.0,
+ ),
+ default_timeout=10.0,
+ client_info=client_info,
+ kind="rest_asyncio",
+ ),
+ }
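+
+        # Note: unlike the synchronous defaults in the base class, these
+        # wrappers are built with gapic_v1.method_async and AsyncRetry, and
+        # omit the compression default used by the synchronous transport.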
+
+ async def _list_operations(
+ self,
+ request: operations_pb2.ListOperationsRequest,
+ *,
+ # TODO(https://github.com/googleapis/python-api-core/issues/722): Leverage `retry`
+ # to allow configuring retryable error codes.
+ retry=gapic_v1.method_async.DEFAULT,
+ timeout: Optional[float] = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operations_pb2.ListOperationsResponse:
+ r"""Asynchronously call the list operations method over HTTP.
+
+ Args:
+ request (~.operations_pb2.ListOperationsRequest):
+ The request object. The request message for
+ [Operations.ListOperations][google.api_core.operations_v1.Operations.ListOperations].
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.operations_pb2.ListOperationsResponse:
+ The response message for
+ [Operations.ListOperations][google.api_core.operations_v1.Operations.ListOperations].
+
+ """
+
+ http_options = [
+ {
+ "method": "get",
+ "uri": "/{}/{{name=**}}/operations".format(self._path_prefix),
+ },
+ ]
+ if "google.longrunning.Operations.ListOperations" in self._http_options:
+ http_options = self._http_options[
+ "google.longrunning.Operations.ListOperations"
+ ]
+
+ request_kwargs = self._convert_protobuf_message_to_dict(request)
+ transcoded_request = path_template.transcode(http_options, **request_kwargs)
+
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+
+ # Jsonify the query params
+ query_params_request = operations_pb2.ListOperationsRequest()
+ json_format.ParseDict(transcoded_request["query_params"], query_params_request)
+ query_params = json_format.MessageToDict(
+ query_params_request,
+ preserving_proto_field_name=False,
+ use_integers_for_enums=False,
+ )
+
+ # Send the request
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+        # TODO(https://github.com/googleapis/python-api-core/issues/721): Update incorrect use of `uri` variable name.
+ response = await getattr(self._session, method)(
+ "{host}{uri}".format(host=self._host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params),
+ )
+ content = await response.read()
+
+ # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+ # subclass.
+ if response.status_code >= 400:
+ payload = json.loads(content.decode("utf-8"))
+ request_url = "{host}{uri}".format(host=self._host, uri=uri)
+ raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore
+
+ # Return the response
+ api_response = operations_pb2.ListOperationsResponse()
+ json_format.Parse(content, api_response, ignore_unknown_fields=False)
+ return api_response
+
+ async def _get_operation(
+ self,
+ request: operations_pb2.GetOperationRequest,
+ *,
+ # TODO(https://github.com/googleapis/python-api-core/issues/722): Leverage `retry`
+ # to allow configuring retryable error codes.
+ retry=gapic_v1.method_async.DEFAULT,
+ timeout: Optional[float] = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operations_pb2.Operation:
+ r"""Asynchronously call the get operation method over HTTP.
+
+ Args:
+ request (~.operations_pb2.GetOperationRequest):
+ The request object. The request message for
+ [Operations.GetOperation][google.api_core.operations_v1.Operations.GetOperation].
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.operations_pb2.Operation:
+                This resource represents a long-running
+                operation that is the result of a network
+                API call.
+
+ """
+
+ http_options = [
+ {
+ "method": "get",
+ "uri": "/{}/{{name=**/operations/*}}".format(self._path_prefix),
+ },
+ ]
+ if "google.longrunning.Operations.GetOperation" in self._http_options:
+ http_options = self._http_options[
+ "google.longrunning.Operations.GetOperation"
+ ]
+
+ request_kwargs = self._convert_protobuf_message_to_dict(request)
+ transcoded_request = path_template.transcode(http_options, **request_kwargs)
+
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+
+ # Jsonify the query params
+ query_params_request = operations_pb2.GetOperationRequest()
+ json_format.ParseDict(transcoded_request["query_params"], query_params_request)
+ query_params = json_format.MessageToDict(
+ query_params_request,
+ preserving_proto_field_name=False,
+ use_integers_for_enums=False,
+ )
+
+ # Send the request
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+        # TODO(https://github.com/googleapis/python-api-core/issues/721): Update incorrect use of `uri` variable name.
+ response = await getattr(self._session, method)(
+ "{host}{uri}".format(host=self._host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params),
+ )
+ content = await response.read()
+
+ # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+ # subclass.
+ if response.status_code >= 400:
+ payload = json.loads(content.decode("utf-8"))
+ request_url = "{host}{uri}".format(host=self._host, uri=uri)
+ raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore
+
+ # Return the response
+ api_response = operations_pb2.Operation()
+ json_format.Parse(content, api_response, ignore_unknown_fields=False)
+ return api_response
+
+ async def _delete_operation(
+ self,
+ request: operations_pb2.DeleteOperationRequest,
+ *,
+ # TODO(https://github.com/googleapis/python-api-core/issues/722): Leverage `retry`
+ # to allow configuring retryable error codes.
+ retry=gapic_v1.method_async.DEFAULT,
+ timeout: Optional[float] = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> empty_pb2.Empty:
+ r"""Asynchronously call the delete operation method over HTTP.
+
+ Args:
+ request (~.operations_pb2.DeleteOperationRequest):
+ The request object. The request message for
+ [Operations.DeleteOperation][google.api_core.operations_v1.Operations.DeleteOperation].
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ """
+
+ http_options = [
+ {
+ "method": "delete",
+ "uri": "/{}/{{name=**/operations/*}}".format(self._path_prefix),
+ },
+ ]
+ if "google.longrunning.Operations.DeleteOperation" in self._http_options:
+ http_options = self._http_options[
+ "google.longrunning.Operations.DeleteOperation"
+ ]
+
+ request_kwargs = self._convert_protobuf_message_to_dict(request)
+ transcoded_request = path_template.transcode(http_options, **request_kwargs)
+
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+
+ # Jsonify the query params
+ query_params_request = operations_pb2.DeleteOperationRequest()
+ json_format.ParseDict(transcoded_request["query_params"], query_params_request)
+ query_params = json_format.MessageToDict(
+ query_params_request,
+ preserving_proto_field_name=False,
+ use_integers_for_enums=False,
+ )
+
+ # Send the request
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+        # TODO(https://github.com/googleapis/python-api-core/issues/721): Update incorrect use of `uri` variable name.
+ response = await getattr(self._session, method)(
+ "{host}{uri}".format(host=self._host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params),
+ )
+
+ # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+ # subclass.
+ if response.status_code >= 400:
+ content = await response.read()
+ payload = json.loads(content.decode("utf-8"))
+ request_url = "{host}{uri}".format(host=self._host, uri=uri)
+ raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore
+
+ return empty_pb2.Empty()
+
+ async def _cancel_operation(
+ self,
+ request: operations_pb2.CancelOperationRequest,
+ *,
+ # TODO(https://github.com/googleapis/python-api-core/issues/722): Leverage `retry`
+ # to allow configuring retryable error codes.
+ retry=gapic_v1.method_async.DEFAULT,
+ timeout: Optional[float] = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> empty_pb2.Empty:
+ r"""Asynchronously call the cancel operation method over HTTP.
+
+ Args:
+ request (~.operations_pb2.CancelOperationRequest):
+ The request object. The request message for
+ [Operations.CancelOperation][google.api_core.operations_v1.Operations.CancelOperation].
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ """
+
+ http_options = [
+ {
+ "method": "post",
+ "uri": "/{}/{{name=**/operations/*}}:cancel".format(self._path_prefix),
+ "body": "*",
+ },
+ ]
+ if "google.longrunning.Operations.CancelOperation" in self._http_options:
+ http_options = self._http_options[
+ "google.longrunning.Operations.CancelOperation"
+ ]
+
+ request_kwargs = self._convert_protobuf_message_to_dict(request)
+ transcoded_request = path_template.transcode(http_options, **request_kwargs)
+
+ # Jsonify the request body
+ body_request = operations_pb2.CancelOperationRequest()
+ json_format.ParseDict(transcoded_request["body"], body_request)
+ body = json_format.MessageToDict(
+ body_request,
+ preserving_proto_field_name=False,
+ use_integers_for_enums=False,
+ )
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+
+ # Jsonify the query params
+ query_params_request = operations_pb2.CancelOperationRequest()
+ json_format.ParseDict(transcoded_request["query_params"], query_params_request)
+ query_params = json_format.MessageToDict(
+ query_params_request,
+ preserving_proto_field_name=False,
+ use_integers_for_enums=False,
+ )
+
+ # Send the request
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+        # TODO(https://github.com/googleapis/python-api-core/issues/721): Update incorrect use of `uri` variable name.
+ response = await getattr(self._session, method)(
+ "{host}{uri}".format(host=self._host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params),
+ data=body,
+ )
+
+ # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+ # subclass.
+ if response.status_code >= 400:
+ content = await response.read()
+ payload = json.loads(content.decode("utf-8"))
+ request_url = "{host}{uri}".format(host=self._host, uri=uri)
+ raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore
+
+ return empty_pb2.Empty()
+
+ @property
+ def list_operations(
+ self,
+ ) -> Callable[
+ [operations_pb2.ListOperationsRequest],
+ Coroutine[Any, Any, operations_pb2.ListOperationsResponse],
+ ]:
+ return self._list_operations
+
+ @property
+ def get_operation(
+ self,
+ ) -> Callable[
+ [operations_pb2.GetOperationRequest],
+ Coroutine[Any, Any, operations_pb2.Operation],
+ ]:
+ return self._get_operation
+
+ @property
+ def delete_operation(
+ self,
+ ) -> Callable[
+ [operations_pb2.DeleteOperationRequest], Coroutine[Any, Any, empty_pb2.Empty]
+ ]:
+ return self._delete_operation
+
+ @property
+ def cancel_operation(
+ self,
+ ) -> Callable[
+ [operations_pb2.CancelOperationRequest], Coroutine[Any, Any, empty_pb2.Empty]
+ ]:
+ return self._cancel_operation
+
+
+__all__ = ("AsyncOperationsRestTransport",)
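+
+# A hypothetical usage sketch (not part of this module): the async transport
+# mirrors the synchronous one but must be driven from an event loop. The
+# operation name is a made-up placeholder.
+#
+#     import asyncio
+#     from google.auth.aio import credentials as ga_credentials_async
+#
+#     async def main():
+#         transport = AsyncOperationsRestTransport(
+#             credentials=ga_credentials_async.AnonymousCredentials(),
+#         )
+#         request = operations_pb2.GetOperationRequest(name="operations/sample-op")
+#         operation = await transport.get_operation(request)
+#         print(operation.done)
+#
+#     asyncio.run(main())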
diff --git a/google/api_core/page_iterator.py b/google/api_core/page_iterator.py
index 11a92d38..23761ec4 100644
--- a/google/api_core/page_iterator.py
+++ b/google/api_core/page_iterator.py
@@ -81,8 +81,6 @@
import abc
-import six
-
class Page(object):
"""Single page of results in an iterator.
@@ -127,18 +125,15 @@ def __iter__(self):
"""The :class:`Page` is an iterator of items."""
return self
- def next(self):
+ def __next__(self):
"""Get the next value in the page."""
- item = six.next(self._item_iter)
+ item = next(self._item_iter)
result = self._item_to_value(self._parent, item)
# Since we've successfully got the next value from the
# iterator, we update the number of remaining.
self._remaining -= 1
return result
- # Alias needed for Python 2/3 support.
- __next__ = next
-
def _item_to_value_identity(iterator, item):
"""An item to value transformer that returns the item un-changed."""
@@ -147,8 +142,7 @@ def _item_to_value_identity(iterator, item):
return item
-@six.add_metaclass(abc.ABCMeta)
-class Iterator(object):
+class Iterator(object, metaclass=abc.ABCMeta):
"""A generic class for iterating through API list responses.
Args:
@@ -170,6 +164,8 @@ def __init__(
max_results=None,
):
self._started = False
+ self.__active_iterator = None
+
self.client = client
"""Optional[Any]: The client that created this iterator."""
self.item_to_value = item_to_value
@@ -179,7 +175,7 @@ def __init__(
single item.
"""
self.max_results = max_results
- """int: The maximum number of results to fetch."""
+ """int: The maximum number of results to fetch"""
# The attributes below will change over the life of the iterator.
self.page_number = 0
@@ -228,6 +224,11 @@ def __iter__(self):
self._started = True
return self._items_iter()
+ def __next__(self):
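+        # Lazily create and cache a single items iterator so that repeated
+        # next(iterator) calls advance one shared stream instead of
+        # restarting from the first item each time.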
+ if self.__active_iterator is None:
+ self.__active_iterator = iter(self)
+ return next(self.__active_iterator)
+
def _page_iter(self, increment):
"""Generator of pages of API responses.
@@ -298,7 +299,8 @@ class HTTPIterator(Iterator):
can be found.
page_token (str): A token identifying a page in a result set to start
fetching results from.
- max_results (int): The maximum number of results to fetch.
+        page_size (int): The maximum number of results to fetch per page.
+        max_results (int): The maximum number of results to fetch.
extra_params (dict): Extra query string parameters for the
API call.
page_start (Callable[
@@ -329,6 +331,7 @@ def __init__(
item_to_value,
items_key=_DEFAULT_ITEMS_KEY,
page_token=None,
+ page_size=None,
max_results=None,
extra_params=None,
page_start=_do_nothing_page_start,
@@ -341,6 +344,7 @@ def __init__(
self.path = path
self._items_key = items_key
self.extra_params = extra_params
+ self._page_size = page_size
self._page_start = page_start
self._next_token = next_token
# Verify inputs / provide defaults.
@@ -399,8 +403,18 @@ def _get_query_params(self):
result = {}
if self.next_page_token is not None:
result[self._PAGE_TOKEN] = self.next_page_token
+
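+        # Clamp the page size: ask the API only for as many results as still
+        # remain under max_results, further capped by the configured
+        # page_size. E.g. with max_results=100, num_results=40, page_size=25
+        # the next request asks for 25; once only 10 remain it asks for 10.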
+ page_size = None
if self.max_results is not None:
- result[self._MAX_RESULTS] = self.max_results - self.num_results
+ page_size = self.max_results - self.num_results
+ if self._page_size is not None:
+ page_size = min(page_size, self._page_size)
+ elif self._page_size is not None:
+ page_size = self._page_size
+
+ if page_size is not None:
+ result[self._MAX_RESULTS] = page_size
+
result.update(self.extra_params)
return result
@@ -434,7 +448,7 @@ class _GAXIterator(Iterator):
page_iter (google.gax.PageIterator): A GAX page iterator to be wrapped
to conform to the :class:`Iterator` interface.
item_to_value (Callable[Iterator, Any]): Callable to convert an item
- from the the protobuf response into a native object. Will
+ from the protobuf response into a native object. Will
be called with the iterator and a single item.
max_results (int): The maximum number of results to fetch.
@@ -461,7 +475,7 @@ def _next_page(self):
there are no pages left.
"""
try:
- items = six.next(self._gax_page_iter)
+ items = next(self._gax_page_iter)
page = Page(self, items, self.item_to_value)
self.next_page_token = self._gax_page_iter.page_token or None
return page
diff --git a/google/api_core/page_iterator_async.py b/google/api_core/page_iterator_async.py
index a0aa41a7..c0725758 100644
--- a/google/api_core/page_iterator_async.py
+++ b/google/api_core/page_iterator_async.py
@@ -101,6 +101,8 @@ def __init__(
max_results=None,
):
self._started = False
+ self.__active_aiterator = None
+
self.client = client
"""Optional[Any]: The client that created this iterator."""
self.item_to_value = item_to_value
@@ -159,6 +161,11 @@ def __aiter__(self):
self._started = True
return self._items_aiter()
+ async def __anext__(self):
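+        # Same lazy caching as the synchronous Iterator: every __anext__ call
+        # advances one shared async items iterator.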
+ if self.__active_aiterator is None:
+ self.__active_aiterator = self.__aiter__()
+ return await self.__active_aiterator.__anext__()
+
async def _page_aiter(self, increment):
"""Generator of pages of API responses.
diff --git a/google/api_core/path_template.py b/google/api_core/path_template.py
index bb549356..b8ebb2af 100644
--- a/google/api_core/path_template.py
+++ b/google/api_core/path_template.py
@@ -25,11 +25,11 @@
from __future__ import unicode_literals
+from collections import deque
+import copy
import functools
import re
-import six
-
# Regular expression for extracting variable parts from a path template.
# The variables can be expressed as:
#
@@ -66,7 +66,7 @@ def _expand_variable_match(positional_vars, named_vars, match):
"""Expand a matched variable with its value.
Args:
- positional_vars (list): A list of positonal variables. This list will
+ positional_vars (list): A list of positional variables. This list will
be modified.
named_vars (dict): A dictionary of named variables.
match (re.Match): A regular expression match.
@@ -83,7 +83,7 @@ def _expand_variable_match(positional_vars, named_vars, match):
name = match.group("name")
if name is not None:
try:
- return six.text_type(named_vars[name])
+ return str(named_vars[name])
except KeyError:
raise ValueError(
"Named variable '{}' not specified and needed by template "
@@ -91,7 +91,7 @@ def _expand_variable_match(positional_vars, named_vars, match):
)
elif positional is not None:
try:
- return six.text_type(positional_vars.pop(0))
+ return str(positional_vars.pop(0))
except IndexError:
raise ValueError(
"Positional variable not specified and needed by template "
@@ -104,7 +104,7 @@ def _expand_variable_match(positional_vars, named_vars, match):
def expand(tmpl, *args, **kwargs):
"""Expand a path template with the given variables.
- ..code-block:: python
+ .. code-block:: python
>>> expand('users/*/messages/*', 'me', '123')
users/me/messages/123
@@ -172,6 +172,56 @@ def _generate_pattern_for_template(tmpl):
return _VARIABLE_RE.sub(_replace_variable_with_pattern, tmpl)
+def get_field(request, field):
+ """Get the value of a field from a given dictionary.
+
+ Args:
+ request (dict | Message): A dictionary or a Message object.
+ field (str): The key to the request in dot notation.
+
+ Returns:
+ The value of the field.
+ """
+ parts = field.split(".")
+ value = request
+
+ for part in parts:
+ if not isinstance(value, dict):
+ value = getattr(value, part, None)
+ else:
+ value = value.get(part)
+ if isinstance(value, dict):
+ return
+ return value
+
+
+def delete_field(request, field):
+ """Delete the value of a field from a given dictionary.
+
+ Args:
+ request (dict | Message): A dictionary object or a Message.
+ field (str): The key to the request in dot notation.
+ """
+ parts = deque(field.split("."))
+ while len(parts) > 1:
+ part = parts.popleft()
+ if not isinstance(request, dict):
+ if hasattr(request, part):
+ request = getattr(request, part, None)
+ else:
+ return
+ else:
+ request = request.get(part)
+ part = parts.popleft()
+ if not isinstance(request, dict):
+ if hasattr(request, part):
+ request.ClearField(part)
+ else:
+ return
+ else:
+ request.pop(part, None)
+
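+# A quick illustration of the two helpers above, assuming plain dict requests
+# (proto messages go through the getattr/ClearField branches instead):
+#
+#     request = {"name": "operations/op1", "filter": "done"}
+#     get_field(request, "name")       # -> "operations/op1"
+#     delete_field(request, "filter")  # request becomes {"name": "operations/op1"}
+#
+# get_field intentionally returns None when the resolved value is itself a
+# dict, since sub-messages cannot be used as URI path values.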
+
def validate(tmpl, path):
"""Validate a path against the path template.
@@ -195,3 +245,102 @@ def validate(tmpl, path):
"""
pattern = _generate_pattern_for_template(tmpl) + "$"
return True if re.match(pattern, path) is not None else False
+
+
+def transcode(http_options, message=None, **request_kwargs):
+ """Transcodes a grpc request pattern into a proper HTTP request following the rules outlined here,
+ https://github.com/googleapis/googleapis/blob/master/google/api/http.proto#L44-L312
+
+ Args:
+ http_options (list(dict)): A list of dicts which consist of these keys,
+ 'method' (str): The http method
+ 'uri' (str): The path template
+ 'body' (str): The body field name (optional)
+ (This is a simplified representation of the proto option `google.api.http`)
+
+ message (Message) : A request object (optional)
+ request_kwargs (dict) : A dict representing the request object
+
+ Returns:
+ dict: The transcoded request with these keys,
+ 'method' (str) : The http method
+ 'uri' (str) : The expanded uri
+ 'body' (dict | Message) : A dict or a Message representing the body (optional)
+ 'query_params' (dict | Message) : A dict or Message mapping query parameter variables and values
+
+ Raises:
+ ValueError: If the request does not match the given template.
+ """
+ transcoded_value = message or request_kwargs
+ bindings = []
+ for http_option in http_options:
+ request = {}
+
+ # Assign path
+ uri_template = http_option["uri"]
+ fields = [
+ (m.group("name"), m.group("template"))
+ for m in _VARIABLE_RE.finditer(uri_template)
+ ]
+ bindings.append((uri_template, fields))
+
+ path_args = {field: get_field(transcoded_value, field) for field, _ in fields}
+ request["uri"] = expand(uri_template, **path_args)
+
+ if not validate(uri_template, request["uri"]) or not all(path_args.values()):
+ continue
+
+ # Remove fields used in uri path from request
+ leftovers = copy.deepcopy(transcoded_value)
+ for path_field, _ in fields:
+ delete_field(leftovers, path_field)
+
+ # Assign body and query params
+ body = http_option.get("body")
+
+ if body:
+ if body == "*":
+ request["body"] = leftovers
+ if message:
+ request["query_params"] = message.__class__()
+ else:
+ request["query_params"] = {}
+ else:
+ try:
+ if message:
+ request["body"] = getattr(leftovers, body)
+ delete_field(leftovers, body)
+ else:
+ request["body"] = leftovers.pop(body)
+ except (KeyError, AttributeError):
+ continue
+ request["query_params"] = leftovers
+ else:
+ request["query_params"] = leftovers
+ request["method"] = http_option["method"]
+ return request
+
+ bindings_description = [
+ '\n\tURI: "{}"'
+ "\n\tRequired request fields:\n\t\t{}".format(
+ uri,
+ "\n\t\t".join(
+ [
+ 'field: "{}", pattern: "{}"'.format(n, p if p else "*")
+ for n, p in fields
+ ]
+ ),
+ )
+ for uri, fields in bindings
+ ]
+
+ raise ValueError(
+ "Invalid request."
+ "\nSome of the fields of the request message are either not initialized or "
+ "initialized with an invalid value."
+ "\nPlease make sure your request matches at least one accepted HTTP binding."
+ "\nTo match a binding the request message must have all the required fields "
+ "initialized with values matching their patterns as listed below:{}".format(
+ "\n".join(bindings_description)
+ )
+ )
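+
+
+# A minimal sketch of transcode() with a dict request (proto messages work the
+# same through the Message branches):
+#
+#     http_options = [{"method": "get", "uri": "/v1/{name=**/operations/*}"}]
+#     transcode(http_options, name="projects/p1/operations/op1", filter="done")
+#     # -> {"uri": "/v1/projects/p1/operations/op1",
+#     #     "method": "get",
+#     #     "query_params": {"filter": "done"}}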
diff --git a/google/api_core/protobuf_helpers.py b/google/api_core/protobuf_helpers.py
index 365ef25c..30cd7c85 100644
--- a/google/api_core/protobuf_helpers.py
+++ b/google/api_core/protobuf_helpers.py
@@ -15,6 +15,7 @@
"""Helpers for :mod:`protobuf`."""
import collections
+import collections.abc
import copy
import inspect
@@ -22,11 +23,6 @@
from google.protobuf import message
from google.protobuf import wrappers_pb2
-try:
- from collections import abc as collections_abc
-except ImportError: # Python 2.7
- import collections as collections_abc
-
_SENTINEL = object()
_WRAPPER_TYPES = (
@@ -67,9 +63,7 @@ def from_any_pb(pb_type, any_pb):
# Unpack the Any object and populate the protobuf message instance.
if not any_pb.Unpack(msg_pb):
raise TypeError(
- "Could not convert {} to {}".format(
- any_pb.__class__.__name__, pb_type.__name__
- )
+ f"Could not convert `{any_pb.TypeName()}` with underlying type `google.protobuf.any_pb2.Any` to `{msg_pb.DESCRIPTOR.full_name}`"
)
# Done; return the message.
@@ -179,7 +173,7 @@ def get(msg_or_dict, key, default=_SENTINEL):
# If we get something else, complain.
if isinstance(msg_or_dict, message.Message):
answer = getattr(msg_or_dict, key, default)
- elif isinstance(msg_or_dict, collections_abc.Mapping):
+ elif isinstance(msg_or_dict, collections.abc.Mapping):
answer = msg_or_dict.get(key, default)
else:
raise TypeError(
@@ -204,7 +198,7 @@ def _set_field_on_message(msg, key, value):
"""Set helper for protobuf Messages."""
# Attempt to set the value on the types of objects we know how to deal
# with.
- if isinstance(value, (collections_abc.MutableSequence, tuple)):
+ if isinstance(value, (collections.abc.MutableSequence, tuple)):
# Clear the existing repeated protobuf message of any elements
# currently inside it.
while getattr(msg, key):
@@ -212,13 +206,13 @@ def _set_field_on_message(msg, key, value):
# Write our new elements to the repeated field.
for item in value:
- if isinstance(item, collections_abc.Mapping):
+ if isinstance(item, collections.abc.Mapping):
getattr(msg, key).add(**item)
else:
# protobuf's RepeatedCompositeContainer doesn't support
# append.
getattr(msg, key).extend([item])
- elif isinstance(value, collections_abc.Mapping):
+ elif isinstance(value, collections.abc.Mapping):
# Assign the dictionary values to the protobuf message.
for item_key, item_value in value.items():
set(getattr(msg, key), item_key, item_value)
@@ -241,7 +235,7 @@ def set(msg_or_dict, key, value):
TypeError: If ``msg_or_dict`` is not a Message or dictionary.
"""
# Sanity check: Is our target object valid?
- if not isinstance(msg_or_dict, (collections_abc.MutableMapping, message.Message)):
+ if not isinstance(msg_or_dict, (collections.abc.MutableMapping, message.Message)):
raise TypeError(
"set() expected a dict or protobuf message, got {!r}.".format(
type(msg_or_dict)
@@ -254,12 +248,12 @@ def set(msg_or_dict, key, value):
# If a subkey exists, then get that object and call this method
# recursively against it using the subkey.
if subkey is not None:
- if isinstance(msg_or_dict, collections_abc.MutableMapping):
+ if isinstance(msg_or_dict, collections.abc.MutableMapping):
msg_or_dict.setdefault(basekey, {})
set(get(msg_or_dict, basekey), subkey, value)
return
- if isinstance(msg_or_dict, collections_abc.MutableMapping):
+ if isinstance(msg_or_dict, collections.abc.MutableMapping):
msg_or_dict[key] = value
else:
_set_field_on_message(msg_or_dict, key, value)
@@ -292,10 +286,10 @@ def field_mask(original, modified):
Args:
original (~google.protobuf.message.Message): the original message.
- If set to None, this field will be interpretted as an empty
+ If set to None, this field will be interpreted as an empty
message.
modified (~google.protobuf.message.Message): the modified message.
- If set to None, this field will be interpretted as an empty
+ If set to None, this field will be interpreted as an empty
message.
Returns:
@@ -317,7 +311,7 @@ def field_mask(original, modified):
modified = copy.deepcopy(original)
modified.Clear()
- if type(original) != type(modified):
+ if not isinstance(original, type(modified)):
raise ValueError(
"expected that both original and modified should be of the "
'same type, received "{!r}" and "{!r}".'.format(
@@ -357,6 +351,13 @@ def _field_mask_helper(original, modified, current=""):
def _get_path(current, name):
+ # gapic-generator-python appends underscores to field names
+ # that collide with python keywords.
+ # `_` is stripped away as it is not possible to
+ # natively define a field with a trailing underscore in protobuf.
+ # APIs will reject field masks if fields have trailing underscores.
+ # See https://github.com/googleapis/python-api-core/issues/227
+ name = name.rstrip("_")
if not current:
return name
return "%s.%s" % (current, name)
diff --git a/google/api_core/py.typed b/google/api_core/py.typed
new file mode 100644
index 00000000..1d5517b1
--- /dev/null
+++ b/google/api_core/py.typed
@@ -0,0 +1,2 @@
+# Marker file for PEP 561.
+# The google-api-core package uses inline types.
diff --git a/google/api_core/rest_helpers.py b/google/api_core/rest_helpers.py
new file mode 100644
index 00000000..a78822f1
--- /dev/null
+++ b/google/api_core/rest_helpers.py
@@ -0,0 +1,109 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helpers for rest transports."""
+
+import functools
+import operator
+
+
+def flatten_query_params(obj, strict=False):
+ """Flatten a dict into a list of (name,value) tuples.
+
+ The result is suitable for setting query params on an http request.
+
+ .. code-block:: python
+
+ >>> obj = {'a':
+ ... {'b':
+ ... {'c': ['x', 'y', 'z']} },
+ ... 'd': 'uvw',
+ ... 'e': True, }
+ >>> flatten_query_params(obj, strict=True)
+ [('a.b.c', 'x'), ('a.b.c', 'y'), ('a.b.c', 'z'), ('d', 'uvw'), ('e', 'true')]
+
+ Note that, as described in
+ https://github.com/googleapis/googleapis/blob/48d9fb8c8e287c472af500221c6450ecd45d7d39/google/api/http.proto#L117,
+ repeated fields (i.e. list-valued fields) may only contain primitive types (not lists or dicts).
+ This is enforced in this function.
+
+ Args:
+ obj: a possibly nested dictionary (from json), or None
+ strict: a bool, defaulting to False, to enforce that all values in the
+ result tuples be strings and, if boolean, lower-cased.
+
+ Returns: a list of tuples, with each tuple having a (possibly) multi-part name
+ and a scalar value.
+
+ Raises:
+        TypeError: If obj is not a dict or None.
+        ValueError: If obj contains a list of non-primitive values.
+ """
+
+ if obj is not None and not isinstance(obj, dict):
+ raise TypeError("flatten_query_params must be called with dict object")
+
+ return _flatten(obj, key_path=[], strict=strict)
+
+
+def _flatten(obj, key_path, strict=False):
+ if obj is None:
+ return []
+ if isinstance(obj, dict):
+ return _flatten_dict(obj, key_path=key_path, strict=strict)
+ if isinstance(obj, list):
+ return _flatten_list(obj, key_path=key_path, strict=strict)
+ return _flatten_value(obj, key_path=key_path, strict=strict)
+
+
+def _is_primitive_value(obj):
+ if obj is None:
+ return False
+
+ if isinstance(obj, (list, dict)):
+ raise ValueError("query params may not contain repeated dicts or lists")
+
+ return True
+
+
+def _flatten_value(obj, key_path, strict=False):
+ return [(".".join(key_path), _canonicalize(obj, strict=strict))]
+
+
+def _flatten_dict(obj, key_path, strict=False):
+ items = (
+ _flatten(value, key_path=key_path + [key], strict=strict)
+ for key, value in obj.items()
+ )
+ return functools.reduce(operator.concat, items, [])
+
+
+def _flatten_list(elems, key_path, strict=False):
+ # Only lists of scalar values are supported.
+ # The name (key_path) is repeated for each value.
+ items = (
+ _flatten_value(elem, key_path=key_path, strict=strict)
+ for elem in elems
+ if _is_primitive_value(elem)
+ )
+ return functools.reduce(operator.concat, items, [])
+
+
+def _canonicalize(obj, strict=False):
+ if strict:
+ value = str(obj)
+ if isinstance(obj, bool):
+ value = value.lower()
+ return value
+ return obj
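
A short usage sketch for the new helper (expected output shown in comments):

    from google.api_core.rest_helpers import flatten_query_params

    params = flatten_query_params({"a": {"b": {"c": ["x", "y"]}}, "e": True})
    # -> [('a.b.c', 'x'), ('a.b.c', 'y'), ('e', True)]

    params_strict = flatten_query_params({"e": True}, strict=True)
    # -> [('e', 'true')]  (strict mode stringifies values, lower-casing booleans)
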
diff --git a/google/api_core/rest_streaming.py b/google/api_core/rest_streaming.py
new file mode 100644
index 00000000..84aa270c
--- /dev/null
+++ b/google/api_core/rest_streaming.py
@@ -0,0 +1,66 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helpers for server-side streaming in REST."""
+
+from typing import Union
+
+import proto
+import requests
+import google.protobuf.message
+from google.api_core._rest_streaming_base import BaseResponseIterator
+
+
+class ResponseIterator(BaseResponseIterator):
+ """Iterator over REST API responses.
+
+ Args:
+ response (requests.Response): An API response object.
+ response_message_cls (Union[proto.Message, google.protobuf.message.Message]): A response
+ class expected to be returned from an API.
+
+ Raises:
+ ValueError:
+ - If `response_message_cls` is not a subclass of `proto.Message` or `google.protobuf.message.Message`.
+ """
+
+ def __init__(
+ self,
+ response: requests.Response,
+ response_message_cls: Union[proto.Message, google.protobuf.message.Message],
+ ):
+ self._response = response
+ # Inner iterator over HTTP response's content.
+ self._response_itr = self._response.iter_content(decode_unicode=True)
+ super(ResponseIterator, self).__init__(
+ response_message_cls=response_message_cls
+ )
+
+ def cancel(self):
+ """Cancel existing streaming operation."""
+ self._response.close()
+
+ def __next__(self):
+ while not self._ready_objs:
+ try:
+ chunk = next(self._response_itr)
+ self._process_chunk(chunk)
+ except StopIteration as e:
+ if self._level > 0:
+ raise ValueError("Unfinished stream: %s" % self._obj)
+ raise e
+ return self._grab()
+
+ def __iter__(self):
+ return self
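
A hypothetical usage sketch for the iterator; `stream_url` and `EchoResponse` are placeholders rather than part of this change:

    import requests
    from google.api_core.rest_streaming import ResponseIterator

    # response = requests.get(stream_url, stream=True)   # placeholder endpoint
    # for message in ResponseIterator(response, EchoResponse):
    #     handle(message)                                 # placeholder consumer
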
diff --git a/google/api_core/rest_streaming_async.py b/google/api_core/rest_streaming_async.py
new file mode 100644
index 00000000..370c2b53
--- /dev/null
+++ b/google/api_core/rest_streaming_async.py
@@ -0,0 +1,89 @@
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helpers for asynchronous server-side streaming in REST."""
+
+from typing import Union
+
+import proto
+
+try:
+ import google.auth.aio.transport
+except ImportError as e: # pragma: NO COVER
+ raise ImportError(
+ "`google-api-core[async_rest]` is required to use asynchronous rest streaming. "
+ "Install the `async_rest` extra of `google-api-core` using "
+ "`pip install google-api-core[async_rest]`."
+ ) from e
+
+import google.protobuf.message
+from google.api_core._rest_streaming_base import BaseResponseIterator
+
+
+class AsyncResponseIterator(BaseResponseIterator):
+ """Asynchronous Iterator over REST API responses.
+
+ Args:
+ response (google.auth.aio.transport.Response): An API response object.
+ response_message_cls (Union[proto.Message, google.protobuf.message.Message]): A response
+ class expected to be returned from an API.
+
+ Raises:
+ ValueError:
+ - If `response_message_cls` is not a subclass of `proto.Message` or `google.protobuf.message.Message`.
+ """
+
+ def __init__(
+ self,
+ response: google.auth.aio.transport.Response,
+ response_message_cls: Union[proto.Message, google.protobuf.message.Message],
+ ):
+ self._response = response
+ self._chunk_size = 1024
+ # TODO(https://github.com/googleapis/python-api-core/issues/703): mypy does not recognize the abstract content
+ # method as an async generator as it looks for the `yield` keyword in the implementation.
+ # Given that the abstract method is not implemented, mypy fails to recognize it as an async generator.
+ # mypy warnings are silenced until the linked issue is resolved.
+ self._response_itr = self._response.content(self._chunk_size).__aiter__() # type: ignore
+ super(AsyncResponseIterator, self).__init__(
+ response_message_cls=response_message_cls
+ )
+
+ async def __aenter__(self):
+ return self
+
+ async def cancel(self):
+ """Cancel existing streaming operation."""
+ await self._response.close()
+
+ async def __anext__(self):
+ while not self._ready_objs:
+ try:
+ chunk = await self._response_itr.__anext__()
+ chunk = chunk.decode("utf-8")
+ self._process_chunk(chunk)
+ except StopAsyncIteration as e:
+ if self._level > 0:
+ raise ValueError("i Unfinished stream: %s" % self._obj)
+ raise e
+ except ValueError as e:
+ raise e
+ return self._grab()
+
+ def __aiter__(self):
+ return self
+
+ async def __aexit__(self, exc_type, exc, tb):
+ """Cancel existing async streaming operation."""
+ await self._response.close()
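
The async variant follows the same shape; a hypothetical sketch, assuming the `async_rest` extra is installed and with `session`, `stream_url`, and `EchoResponse` as placeholders:

    from google.api_core.rest_streaming_async import AsyncResponseIterator

    # response = await session.get(stream_url)   # google.auth.aio transport response
    # async with AsyncResponseIterator(response, EchoResponse) as itr:
    #     async for message in itr:
    #         handle(message)                    # placeholder consumer
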
diff --git a/google/api_core/retry.py b/google/api_core/retry.py
deleted file mode 100644
index ea890628..00000000
--- a/google/api_core/retry.py
+++ /dev/null
@@ -1,364 +0,0 @@
-# Copyright 2017 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Helpers for retrying functions with exponential back-off.
-
-The :class:`Retry` decorator can be used to retry functions that raise
-exceptions using exponential backoff. Because an exponential sleep algorithm is
-used, the retry is limited by a `deadline`. The deadline is the maximum amount
-of time a method can block. This is used instead of total number of retries
-because it is difficult to ascertain the amount of time a function can block
-when using total number of retries and exponential backoff.
-
-By default, this decorator will retry transient
-API errors (see :func:`if_transient_error`). For example:
-
-.. code-block:: python
-
- @retry.Retry()
- def call_flaky_rpc():
- return client.flaky_rpc()
-
- # Will retry flaky_rpc() if it raises transient API errors.
- result = call_flaky_rpc()
-
-You can pass a custom predicate to retry on different exceptions, such as
-waiting for an eventually consistent item to be available:
-
-.. code-block:: python
-
- @retry.Retry(predicate=if_exception_type(exceptions.NotFound))
- def check_if_exists():
- return client.does_thing_exist()
-
- is_available = check_if_exists()
-
-Some client library methods apply retry automatically. These methods can accept
-a ``retry`` parameter that allows you to configure the behavior:
-
-.. code-block:: python
-
- my_retry = retry.Retry(deadline=60)
- result = client.some_method(retry=my_retry)
-
-"""
-
-from __future__ import unicode_literals
-
-import datetime
-import functools
-import logging
-import random
-import time
-
-import six
-
-from google.api_core import datetime_helpers
-from google.api_core import exceptions
-from google.api_core import general_helpers
-
-_LOGGER = logging.getLogger(__name__)
-_DEFAULT_INITIAL_DELAY = 1.0 # seconds
-_DEFAULT_MAXIMUM_DELAY = 60.0 # seconds
-_DEFAULT_DELAY_MULTIPLIER = 2.0
-_DEFAULT_DEADLINE = 60.0 * 2.0 # seconds
-
-
-def if_exception_type(*exception_types):
- """Creates a predicate to check if the exception is of a given type.
-
- Args:
- exception_types (Sequence[:func:`type`]): The exception types to check
- for.
-
- Returns:
- Callable[Exception]: A predicate that returns True if the provided
- exception is of the given type(s).
- """
-
- def if_exception_type_predicate(exception):
- """Bound predicate for checking an exception type."""
- return isinstance(exception, exception_types)
-
- return if_exception_type_predicate
-
-
-# pylint: disable=invalid-name
-# Pylint sees this as a constant, but it is also an alias that should be
-# considered a function.
-if_transient_error = if_exception_type(
- exceptions.InternalServerError,
- exceptions.TooManyRequests,
- exceptions.ServiceUnavailable,
-)
-"""A predicate that checks if an exception is a transient API error.
-
-The following server errors are considered transient:
-
-- :class:`google.api_core.exceptions.InternalServerError` - HTTP 500, gRPC
- ``INTERNAL(13)`` and its subclasses.
-- :class:`google.api_core.exceptions.TooManyRequests` - HTTP 429
-- :class:`google.api_core.exceptions.ServiceUnavailable` - HTTP 503
-- :class:`google.api_core.exceptions.ResourceExhausted` - gRPC
- ``RESOURCE_EXHAUSTED(8)``
-"""
-# pylint: enable=invalid-name
-
-
-def exponential_sleep_generator(initial, maximum, multiplier=_DEFAULT_DELAY_MULTIPLIER):
- """Generates sleep intervals based on the exponential back-off algorithm.
-
- This implements the `Truncated Exponential Back-off`_ algorithm.
-
- .. _Truncated Exponential Back-off:
- https://cloud.google.com/storage/docs/exponential-backoff
-
- Args:
- initial (float): The minimum amount of time to delay. This must
- be greater than 0.
- maximum (float): The maximum amount of time to delay.
- multiplier (float): The multiplier applied to the delay.
-
- Yields:
- float: successive sleep intervals.
- """
- delay = initial
- while True:
- # Introduce jitter by yielding a delay that is uniformly distributed
- # to average out to the delay time.
- yield min(random.uniform(0.0, delay * 2.0), maximum)
- delay = delay * multiplier
-
-
-def retry_target(target, predicate, sleep_generator, deadline, on_error=None):
- """Call a function and retry if it fails.
-
- This is the lowest-level retry helper. Generally, you'll use the
- higher-level retry helper :class:`Retry`.
-
- Args:
- target(Callable): The function to call and retry. This must be a
- nullary function - apply arguments with `functools.partial`.
- predicate (Callable[Exception]): A callable used to determine if an
- exception raised by the target should be considered retryable.
- It should return True to retry or False otherwise.
- sleep_generator (Iterable[float]): An infinite iterator that determines
- how long to sleep between retries.
- deadline (float): How long to keep retrying the target. The last sleep
- period is shortened as necessary, so that the last retry runs at
- ``deadline`` (and not considerably beyond it).
- on_error (Callable[Exception]): A function to call while processing a
- retryable exception. Any error raised by this function will *not*
- be caught.
-
- Returns:
- Any: the return value of the target function.
-
- Raises:
- google.api_core.RetryError: If the deadline is exceeded while retrying.
- ValueError: If the sleep generator stops yielding values.
- Exception: If the target raises an error that isn't retryable.
- """
- if deadline is not None:
- deadline_datetime = datetime_helpers.utcnow() + datetime.timedelta(
- seconds=deadline
- )
- else:
- deadline_datetime = None
-
- last_exc = None
-
- for sleep in sleep_generator:
- try:
- return target()
-
- # pylint: disable=broad-except
- # This function explicitly must deal with broad exceptions.
- except Exception as exc:
- if not predicate(exc):
- raise
- last_exc = exc
- if on_error is not None:
- on_error(exc)
-
- now = datetime_helpers.utcnow()
-
- if deadline_datetime is not None:
- if deadline_datetime <= now:
- six.raise_from(
- exceptions.RetryError(
- "Deadline of {:.1f}s exceeded while calling {}".format(
- deadline, target
- ),
- last_exc,
- ),
- last_exc,
- )
- else:
- time_to_deadline = (deadline_datetime - now).total_seconds()
- sleep = min(time_to_deadline, sleep)
-
- _LOGGER.debug(
- "Retrying due to {}, sleeping {:.1f}s ...".format(last_exc, sleep)
- )
- time.sleep(sleep)
-
- raise ValueError("Sleep generator stopped yielding sleep values.")
-
-
-@six.python_2_unicode_compatible
-class Retry(object):
- """Exponential retry decorator.
-
- This class is a decorator used to add exponential back-off retry behavior
- to an RPC call.
-
- Although the default behavior is to retry transient API errors, a
- different predicate can be provided to retry other exceptions.
-
- Args:
- predicate (Callable[Exception]): A callable that should return ``True``
- if the given exception is retryable.
- initial (float): The minimum amount of time to delay in seconds. This
- must be greater than 0.
- maximum (float): The maximum amount of time to delay in seconds.
- multiplier (float): The multiplier applied to the delay.
- deadline (float): How long to keep retrying in seconds. The last sleep
- period is shortened as necessary, so that the last retry runs at
- ``deadline`` (and not considerably beyond it).
- """
-
- def __init__(
- self,
- predicate=if_transient_error,
- initial=_DEFAULT_INITIAL_DELAY,
- maximum=_DEFAULT_MAXIMUM_DELAY,
- multiplier=_DEFAULT_DELAY_MULTIPLIER,
- deadline=_DEFAULT_DEADLINE,
- on_error=None,
- ):
- self._predicate = predicate
- self._initial = initial
- self._multiplier = multiplier
- self._maximum = maximum
- self._deadline = deadline
- self._on_error = on_error
-
- def __call__(self, func, on_error=None):
- """Wrap a callable with retry behavior.
-
- Args:
- func (Callable): The callable to add retry behavior to.
- on_error (Callable[Exception]): A function to call while processing
- a retryable exception. Any error raised by this function will
- *not* be caught.
-
- Returns:
- Callable: A callable that will invoke ``func`` with retry
- behavior.
- """
- if self._on_error is not None:
- on_error = self._on_error
-
- @general_helpers.wraps(func)
- def retry_wrapped_func(*args, **kwargs):
- """A wrapper that calls target function with retry."""
- target = functools.partial(func, *args, **kwargs)
- sleep_generator = exponential_sleep_generator(
- self._initial, self._maximum, multiplier=self._multiplier
- )
- return retry_target(
- target,
- self._predicate,
- sleep_generator,
- self._deadline,
- on_error=on_error,
- )
-
- return retry_wrapped_func
-
- @property
- def deadline(self):
- return self._deadline
-
- def with_deadline(self, deadline):
- """Return a copy of this retry with the given deadline.
-
- Args:
- deadline (float): How long to keep retrying.
-
- Returns:
- Retry: A new retry instance with the given deadline.
- """
- return Retry(
- predicate=self._predicate,
- initial=self._initial,
- maximum=self._maximum,
- multiplier=self._multiplier,
- deadline=deadline,
- on_error=self._on_error,
- )
-
- def with_predicate(self, predicate):
- """Return a copy of this retry with the given predicate.
-
- Args:
- predicate (Callable[Exception]): A callable that should return
- ``True`` if the given exception is retryable.
-
- Returns:
- Retry: A new retry instance with the given predicate.
- """
- return Retry(
- predicate=predicate,
- initial=self._initial,
- maximum=self._maximum,
- multiplier=self._multiplier,
- deadline=self._deadline,
- on_error=self._on_error,
- )
-
- def with_delay(self, initial=None, maximum=None, multiplier=None):
- """Return a copy of this retry with the given delay options.
-
- Args:
- initial (float): The minimum amount of time to delay. This must
- be greater than 0.
- maximum (float): The maximum amount of time to delay.
- multiplier (float): The multiplier applied to the delay.
-
- Returns:
- Retry: A new retry instance with the given predicate.
- """
- return Retry(
- predicate=self._predicate,
- initial=initial if initial is not None else self._initial,
- maximum=maximum if maximum is not None else self._maximum,
- multiplier=multiplier if maximum is not None else self._multiplier,
- deadline=self._deadline,
- on_error=self._on_error,
- )
-
- def __str__(self):
- return (
- "".format(
- self._predicate,
- self._initial,
- self._maximum,
- self._multiplier,
- self._deadline,
- self._on_error,
- )
- )
diff --git a/google/api_core/retry/__init__.py b/google/api_core/retry/__init__.py
new file mode 100644
index 00000000..1724fdbd
--- /dev/null
+++ b/google/api_core/retry/__init__.py
@@ -0,0 +1,52 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Retry implementation for Google API client libraries."""
+
+from .retry_base import exponential_sleep_generator
+from .retry_base import if_exception_type
+from .retry_base import if_transient_error
+from .retry_base import build_retry_error
+from .retry_base import RetryFailureReason
+from .retry_unary import Retry
+from .retry_unary import retry_target
+from .retry_unary_async import AsyncRetry
+from .retry_unary_async import retry_target as retry_target_async
+from .retry_streaming import StreamingRetry
+from .retry_streaming import retry_target_stream
+from .retry_streaming_async import AsyncStreamingRetry
+from .retry_streaming_async import retry_target_stream as retry_target_stream_async
+
+# The following imports are for backwards compatibility with https://github.com/googleapis/python-api-core/blob/4d7d2edee2c108d43deb151e6e0fdceb56b73275/google/api_core/retry.py
+#
+# TODO: Revert these imports on the next major version release (https://github.com/googleapis/python-api-core/issues/576)
+from google.api_core import datetime_helpers # noqa: F401
+from google.api_core import exceptions # noqa: F401
+from google.auth import exceptions as auth_exceptions # noqa: F401
+
+__all__ = (
+ "exponential_sleep_generator",
+ "if_exception_type",
+ "if_transient_error",
+ "build_retry_error",
+ "RetryFailureReason",
+ "Retry",
+ "AsyncRetry",
+ "StreamingRetry",
+ "AsyncStreamingRetry",
+ "retry_target",
+ "retry_target_async",
+ "retry_target_stream",
+ "retry_target_stream_async",
+)
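
Since the package re-exports the full public surface, both new-style and pre-split imports keep resolving; a small sketch:

    # New-style imports from the retry package:
    from google.api_core.retry import Retry, AsyncRetry, StreamingRetry, retry_target

    # Pre-split call sites that reached these modules through google.api_core.retry
    # also continue to work (kept for backwards compatibility, per the TODO above):
    from google.api_core.retry import exceptions, datetime_helpers
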
diff --git a/google/api_core/retry/retry_base.py b/google/api_core/retry/retry_base.py
new file mode 100644
index 00000000..263b4ccf
--- /dev/null
+++ b/google/api_core/retry/retry_base.py
@@ -0,0 +1,370 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Shared classes and functions for retrying requests.
+
+:class:`_BaseRetry` is the base class for :class:`Retry`,
+:class:`AsyncRetry`, :class:`StreamingRetry`, and :class:`AsyncStreamingRetry`.
+"""
+
+from __future__ import annotations
+
+import logging
+import random
+import time
+
+from enum import Enum
+from typing import Any, Callable, Optional, Iterator, TYPE_CHECKING
+
+import requests.exceptions
+
+from google.api_core import exceptions
+from google.auth import exceptions as auth_exceptions
+
+if TYPE_CHECKING:
+ import sys
+
+ if sys.version_info >= (3, 11):
+ from typing import Self
+ else:
+ from typing_extensions import Self
+
+_DEFAULT_INITIAL_DELAY = 1.0 # seconds
+_DEFAULT_MAXIMUM_DELAY = 60.0 # seconds
+_DEFAULT_DELAY_MULTIPLIER = 2.0
+_DEFAULT_DEADLINE = 60.0 * 2.0 # seconds
+
+_LOGGER = logging.getLogger("google.api_core.retry")
+
+
+def if_exception_type(
+ *exception_types: type[Exception],
+) -> Callable[[Exception], bool]:
+ """Creates a predicate to check if the exception is of a given type.
+
+ Args:
+ exception_types (Sequence[:func:`type`]): The exception types to check
+ for.
+
+ Returns:
+ Callable[Exception]: A predicate that returns True if the provided
+ exception is of the given type(s).
+ """
+
+ def if_exception_type_predicate(exception: Exception) -> bool:
+ """Bound predicate for checking an exception type."""
+ return isinstance(exception, exception_types)
+
+ return if_exception_type_predicate
+
+
+# pylint: disable=invalid-name
+# Pylint sees this as a constant, but it is also an alias that should be
+# considered a function.
+if_transient_error = if_exception_type(
+ exceptions.InternalServerError,
+ exceptions.TooManyRequests,
+ exceptions.ServiceUnavailable,
+ requests.exceptions.ConnectionError,
+ requests.exceptions.ChunkedEncodingError,
+ auth_exceptions.TransportError,
+)
+"""A predicate that checks if an exception is a transient API error.
+
+The following server errors are considered transient:
+
+- :class:`google.api_core.exceptions.InternalServerError` - HTTP 500, gRPC
+ ``INTERNAL(13)`` and its subclasses.
+- :class:`google.api_core.exceptions.TooManyRequests` - HTTP 429
+- :class:`google.api_core.exceptions.ServiceUnavailable` - HTTP 503
+- :class:`requests.exceptions.ConnectionError`
+- :class:`requests.exceptions.ChunkedEncodingError` - The server declared
+ chunked encoding but sent an invalid chunk.
+- :class:`google.auth.exceptions.TransportError` - Used to indicate an
+ error occurred during an HTTP request.
+"""
+# pylint: enable=invalid-name
+
+
+def exponential_sleep_generator(
+ initial: float, maximum: float, multiplier: float = _DEFAULT_DELAY_MULTIPLIER
+):
+ """Generates sleep intervals based on the exponential back-off algorithm.
+
+ This implements the `Truncated Exponential Back-off`_ algorithm.
+
+ .. _Truncated Exponential Back-off:
+ https://cloud.google.com/storage/docs/exponential-backoff
+
+ Args:
+ initial (float): The minimum amount of time to delay. This must
+ be greater than 0.
+ maximum (float): The maximum amount of time to delay.
+ multiplier (float): The multiplier applied to the delay.
+
+ Yields:
+ float: successive sleep intervals.
+ """
+ max_delay = min(initial, maximum)
+ while True:
+ yield random.uniform(0.0, max_delay)
+ max_delay = min(max_delay * multiplier, maximum)
+
+
+class RetryFailureReason(Enum):
+ """
+ The cause of a failed retry, used when building exceptions
+ """
+
+ TIMEOUT = 0
+ NON_RETRYABLE_ERROR = 1
+
+
+def build_retry_error(
+ exc_list: list[Exception],
+ reason: RetryFailureReason,
+ timeout_val: float | None,
+ **kwargs: Any,
+) -> tuple[Exception, Exception | None]:
+ """
+ Default exception_factory implementation.
+
+ Returns a RetryError if the failure is due to a timeout, otherwise
+ returns the last exception encountered.
+
+ Args:
+ - exc_list: list of exceptions that occurred during the retry
+ - reason: reason for the retry failure.
+ Can be TIMEOUT or NON_RETRYABLE_ERROR
+ - timeout_val: the original timeout value for the retry (in seconds), for use in the exception message
+
+ Returns:
+ - tuple: a tuple of the exception to be raised, and the cause exception if any
+ """
+ if reason == RetryFailureReason.TIMEOUT:
+ # return RetryError with the most recent exception as the cause
+ src_exc = exc_list[-1] if exc_list else None
+ timeout_val_str = f"of {timeout_val:0.1f}s " if timeout_val is not None else ""
+ return (
+ exceptions.RetryError(
+ f"Timeout {timeout_val_str}exceeded",
+ src_exc,
+ ),
+ src_exc,
+ )
+ elif exc_list:
+ # return most recent exception encountered
+ return exc_list[-1], None
+ else:
+ # no exceptions were given in exc_list. Raise generic RetryError
+ return exceptions.RetryError("Unknown error", None), None
+
+
+def _retry_error_helper(
+ exc: Exception,
+ deadline: float | None,
+ sleep_iterator: Iterator[float],
+ error_list: list[Exception],
+ predicate_fn: Callable[[Exception], bool],
+ on_error_fn: Callable[[Exception], None] | None,
+ exc_factory_fn: Callable[
+ [list[Exception], RetryFailureReason, float | None],
+ tuple[Exception, Exception | None],
+ ],
+ original_timeout: float | None,
+) -> float:
+ """
+ Shared logic for handling an error for all retry implementations
+
+ - Raises an error on timeout or non-retryable error
+ - Calls on_error_fn if provided
+ - Logs the error
+
+ Args:
+ - exc: the exception that was raised
+ - deadline: the deadline for the retry, calculated as a diff from time.monotonic()
+ - sleep_iterator: iterator to draw the next backoff value from
+ - error_list: the list of exceptions that have been raised so far
+ - predicate_fn: takes `exc` and returns true if the operation should be retried
+ - on_error_fn: callback to execute when a retryable error occurs
+ - exc_factory_fn: callback used to build the exception to be raised on terminal failure
+ - original_timeout: the original timeout value for the retry (in seconds),
+ to be passed to the exception factory for building an error message
+ Returns:
+ - the sleep value chosen before the next attempt
+ """
+ error_list.append(exc)
+ if not predicate_fn(exc):
+ final_exc, source_exc = exc_factory_fn(
+ error_list,
+ RetryFailureReason.NON_RETRYABLE_ERROR,
+ original_timeout,
+ )
+ raise final_exc from source_exc
+ if on_error_fn is not None:
+ on_error_fn(exc)
+ # next_sleep is fetched after the on_error callback, to allow clients
+ # to update sleep_iterator values dynamically in response to errors
+ try:
+ next_sleep = next(sleep_iterator)
+ except StopIteration:
+ raise ValueError("Sleep generator stopped yielding sleep values.") from exc
+ if deadline is not None and time.monotonic() + next_sleep > deadline:
+ final_exc, source_exc = exc_factory_fn(
+ error_list,
+ RetryFailureReason.TIMEOUT,
+ original_timeout,
+ )
+ raise final_exc from source_exc
+ _LOGGER.debug(
+ "Retrying due to {}, sleeping {:.1f}s ...".format(error_list[-1], next_sleep)
+ )
+ return next_sleep
+
+
+class _BaseRetry(object):
+ """
+ Base class for retry configuration objects. This class is intended to capture retry
+ and backoff configuration that is common to both synchronous and asynchronous retries,
+ for both unary and streaming RPCs. It is not intended to be instantiated directly,
+ but rather to be subclassed by the various retry configuration classes.
+ """
+
+ def __init__(
+ self,
+ predicate: Callable[[Exception], bool] = if_transient_error,
+ initial: float = _DEFAULT_INITIAL_DELAY,
+ maximum: float = _DEFAULT_MAXIMUM_DELAY,
+ multiplier: float = _DEFAULT_DELAY_MULTIPLIER,
+ timeout: Optional[float] = _DEFAULT_DEADLINE,
+ on_error: Optional[Callable[[Exception], Any]] = None,
+ **kwargs: Any,
+ ) -> None:
+ self._predicate = predicate
+ self._initial = initial
+ self._multiplier = multiplier
+ self._maximum = maximum
+ self._timeout = kwargs.get("deadline", timeout)
+ self._deadline = self._timeout
+ self._on_error = on_error
+
+ def __call__(self, *args, **kwargs) -> Any:
+ raise NotImplementedError("Not implemented in base class")
+
+ @property
+ def deadline(self) -> float | None:
+ """
+ DEPRECATED: use ``timeout`` instead. Refer to the ``Retry`` class
+ documentation for details.
+ """
+ return self._timeout
+
+ @property
+ def timeout(self) -> float | None:
+ return self._timeout
+
+ def with_deadline(self, deadline: float | None) -> Self:
+ """Return a copy of this retry with the given timeout.
+
+ DEPRECATED: use :meth:`with_timeout` instead. Refer to the ``Retry`` class
+ documentation for details.
+
+ Args:
+ deadline (float|None): How long to keep retrying, in seconds. If None,
+ no timeout is enforced.
+
+ Returns:
+ Retry: A new retry instance with the given timeout.
+ """
+ return self.with_timeout(deadline)
+
+ def with_timeout(self, timeout: float | None) -> Self:
+ """Return a copy of this retry with the given timeout.
+
+ Args:
+ timeout (float): How long to keep retrying, in seconds. If None,
+ no timeout will be enforced.
+
+ Returns:
+ Retry: A new retry instance with the given timeout.
+ """
+ return type(self)(
+ predicate=self._predicate,
+ initial=self._initial,
+ maximum=self._maximum,
+ multiplier=self._multiplier,
+ timeout=timeout,
+ on_error=self._on_error,
+ )
+
+ def with_predicate(self, predicate: Callable[[Exception], bool]) -> Self:
+ """Return a copy of this retry with the given predicate.
+
+ Args:
+ predicate (Callable[Exception]): A callable that should return
+ ``True`` if the given exception is retryable.
+
+ Returns:
+ Retry: A new retry instance with the given predicate.
+ """
+ return type(self)(
+ predicate=predicate,
+ initial=self._initial,
+ maximum=self._maximum,
+ multiplier=self._multiplier,
+ timeout=self._timeout,
+ on_error=self._on_error,
+ )
+
+ def with_delay(
+ self,
+ initial: Optional[float] = None,
+ maximum: Optional[float] = None,
+ multiplier: Optional[float] = None,
+ ) -> Self:
+ """Return a copy of this retry with the given delay options.
+
+ Args:
+ initial (float): The minimum amount of time to delay (in seconds). This must
+ be greater than 0. If None, the current value is used.
+ maximum (float): The maximum amount of time to delay (in seconds). If None, the
+ current value is used.
+ multiplier (float): The multiplier applied to the delay. If None, the current
+ value is used.
+
+ Returns:
+ Retry: A new retry instance with the given delay options.
+ """
+ return type(self)(
+ predicate=self._predicate,
+ initial=initial if initial is not None else self._initial,
+ maximum=maximum if maximum is not None else self._maximum,
+ multiplier=multiplier if multiplier is not None else self._multiplier,
+ timeout=self._timeout,
+ on_error=self._on_error,
+ )
+
+ def __str__(self) -> str:
+ return (
+ "<{} predicate={}, initial={:.1f}, maximum={:.1f}, "
+ "multiplier={:.1f}, timeout={}, on_error={}>".format(
+ type(self).__name__,
+ self._predicate,
+ self._initial,
+ self._maximum,
+ self._multiplier,
+ self._timeout, # timeout can be None, thus no {:.1f}
+ self._on_error,
+ )
+ )
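
A small sketch of how the pieces above combine: the with_* builders return modified copies (shown through Retry, one of the _BaseRetry subclasses), and exponential_sleep_generator supplies the jittered backoff delays:

    import itertools

    from google.api_core import exceptions
    from google.api_core.retry import (
        Retry,
        exponential_sleep_generator,
        if_exception_type,
    )

    base = Retry(initial=0.5, maximum=8.0, timeout=60.0)
    short = base.with_timeout(10.0).with_delay(initial=0.1)  # copies; base unchanged
    not_found = base.with_predicate(if_exception_type(exceptions.NotFound))

    # Five jittered delays: the i-th is uniform in [0, min(1.0 * 2**i, 10.0)].
    delays = list(itertools.islice(exponential_sleep_generator(1.0, 10.0), 5))
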
diff --git a/google/api_core/retry/retry_streaming.py b/google/api_core/retry/retry_streaming.py
new file mode 100644
index 00000000..e4474c8a
--- /dev/null
+++ b/google/api_core/retry/retry_streaming.py
@@ -0,0 +1,264 @@
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Generator wrapper for retryable streaming RPCs.
+"""
+from __future__ import annotations
+
+from typing import (
+ Callable,
+ Optional,
+ List,
+ Tuple,
+ Iterable,
+ Generator,
+ TypeVar,
+ Any,
+ TYPE_CHECKING,
+)
+
+import sys
+import time
+import functools
+
+from google.api_core.retry.retry_base import _BaseRetry
+from google.api_core.retry.retry_base import _retry_error_helper
+from google.api_core.retry import exponential_sleep_generator
+from google.api_core.retry import build_retry_error
+from google.api_core.retry import RetryFailureReason
+
+if TYPE_CHECKING:
+ if sys.version_info >= (3, 10):
+ from typing import ParamSpec
+ else:
+ from typing_extensions import ParamSpec
+
+ _P = ParamSpec("_P") # target function call parameters
+ _Y = TypeVar("_Y") # yielded values
+
+
+def retry_target_stream(
+ target: Callable[_P, Iterable[_Y]],
+ predicate: Callable[[Exception], bool],
+ sleep_generator: Iterable[float],
+ timeout: Optional[float] = None,
+ on_error: Optional[Callable[[Exception], None]] = None,
+ exception_factory: Callable[
+ [List[Exception], RetryFailureReason, Optional[float]],
+ Tuple[Exception, Optional[Exception]],
+ ] = build_retry_error,
+ init_args: tuple = (),
+ init_kwargs: dict = {},
+ **kwargs,
+) -> Generator[_Y, Any, None]:
+ """Create a generator wrapper that retries the wrapped stream if it fails.
+
+ This is the lowest-level retry helper. Generally, you'll use the
+ higher-level retry helper :class:`Retry`.
+
+ Args:
+ target: The generator function to call and retry.
+ predicate: A callable used to determine if an
+ exception raised by the target should be considered retryable.
+ It should return True to retry or False otherwise.
+ sleep_generator: An infinite iterator that determines
+ how long to sleep between retries.
+ timeout: How long to keep retrying the target.
+ Note: timeout is only checked before initiating a retry, so the target may
+ run past the timeout value as long as it is healthy.
+ on_error: If given, the on_error callback will be called with each
+ retryable exception raised by the target. Any error raised by this
+ function will *not* be caught.
+ exception_factory: A function that is called when the retryable reaches
+ a terminal failure state, used to construct an exception to be raised.
+ It takes a list of all exceptions encountered, a retry.RetryFailureReason
+ enum indicating the failure cause, and the original timeout value
+ as arguments. It should return a tuple of the exception to be raised,
+ along with the cause exception if any. The default implementation will raise
+ a RetryError on timeout, or the last exception encountered otherwise.
+ init_args: Positional arguments to pass to the target function.
+ init_kwargs: Keyword arguments to pass to the target function.
+
+ Returns:
+ Generator: A retryable generator that wraps the target generator function.
+
+ Raises:
+ ValueError: If the sleep generator stops yielding values.
+ Exception: a custom exception specified by the exception_factory if provided.
+ If no exception_factory is provided:
+ google.api_core.RetryError: If the timeout is exceeded while retrying.
+ Exception: If the target raises an error that isn't retryable.
+ """
+
+ timeout = kwargs.get("deadline", timeout)
+ deadline: Optional[float] = (
+ time.monotonic() + timeout if timeout is not None else None
+ )
+ error_list: list[Exception] = []
+ sleep_iter = iter(sleep_generator)
+
+ # continue trying until an attempt completes, or a terminal exception is raised in _retry_error_helper
+ # TODO: support max_attempts argument: https://github.com/googleapis/python-api-core/issues/535
+ while True:
+ # Start a new retry loop
+ try:
+ # Note: in the future, we can add a ResumptionStrategy object
+ # to generate new args between calls. For now, use the same args
+ # for each attempt.
+ subgenerator = target(*init_args, **init_kwargs)
+ return (yield from subgenerator)
+ # handle exceptions raised by the subgenerator
+ # pylint: disable=broad-except
+ # This function explicitly must deal with broad exceptions.
+ except Exception as exc:
+ # defer to shared logic for handling errors
+ next_sleep = _retry_error_helper(
+ exc,
+ deadline,
+ sleep_iter,
+ error_list,
+ predicate,
+ on_error,
+ exception_factory,
+ timeout,
+ )
+ # if exception not raised, sleep before next attempt
+ time.sleep(next_sleep)
+
+
+class StreamingRetry(_BaseRetry):
+ """Exponential retry decorator for streaming synchronous RPCs.
+
+ This class returns a Generator when called, which wraps the target
+ stream in retry logic. If any exception is raised by the target, the
+ entire stream will be retried within the wrapper.
+
+ Although the default behavior is to retry transient API errors, a
+ different predicate can be provided to retry other exceptions.
+
+ Important Note: when a stream encounters a retryable error, it will
+ silently construct a fresh iterator instance in the background
+ and continue yielding (likely duplicate) values as if no error occurred.
+ This is the most general way to retry a stream, but it often is not the
+ desired behavior. Example: iter([1, 2, 1/0]) -> [1, 2, 1, 2, ...]
+
+ There are two ways to build more advanced retry logic for streams:
+
+ 1. Wrap the target
+ Use a ``target`` that maintains state between retries, and creates a
+ different generator on each retry call. For example, you can wrap a
+ network call in a function that modifies the request based on what has
+ already been returned:
+
+ .. code-block:: python
+
+ def attempt_with_modified_request(target, request, seen_items=[]):
+ # remove seen items from request on each attempt
+ new_request = modify_request(request, seen_items)
+ new_generator = target(new_request)
+ for item in new_generator:
+ yield item
+ seen_items.append(item)
+
+ retry_wrapped_fn = StreamingRetry()(attempt_with_modified_request)
+ retryable_generator = retry_wrapped_fn(target, request)
+
+ 2. Wrap the retry generator
+ Alternatively, you can wrap the retryable generator itself before
+ passing it to the end-user to add a filter on the stream. For
+ example, you can keep track of the items that were successfully yielded
+ in previous retry attempts, and only yield new items when the
+ new attempt surpasses the previous ones:
+
+ .. code-block:: python
+
+ def retryable_with_filter(target):
+ stream_idx = 0
+ # reset stream_idx when the stream is retried
+ def on_error(e):
+ nonlocal stream_idx
+ stream_idx = 0
+ # build retryable
+ retryable_gen = StreamingRetry(...)(target)
+ # keep track of what has been yielded out of filter
+ seen_items = []
+ for item in retryable_gen():
+ if stream_idx >= len(seen_items):
+ seen_items.append(item)
+ yield item
+ elif item != seen_items[stream_idx]:
+ raise ValueError("Stream differs from last attempt")
+ stream_idx += 1
+
+ filter_retry_wrapped = retryable_with_filter(target)
+
+ Args:
+ predicate (Callable[Exception]): A callable that should return ``True``
+ if the given exception is retryable.
+ initial (float): The minimum amount of time to delay in seconds. This
+ must be greater than 0.
+ maximum (float): The maximum amount of time to delay in seconds.
+ multiplier (float): The multiplier applied to the delay.
+ timeout (float): How long to keep retrying, in seconds.
+ Note: timeout is only checked before initiating a retry, so the target may
+ run past the timeout value as long as it is healthy.
+ on_error (Callable[Exception]): A function to call while processing
+ a retryable exception. Any error raised by this function will
+ *not* be caught.
+ deadline (float): DEPRECATED: use `timeout` instead. For backward
+ compatibility, if specified it will override the ``timeout`` parameter.
+ """
+
+ def __call__(
+ self,
+ func: Callable[_P, Iterable[_Y]],
+ on_error: Callable[[Exception], Any] | None = None,
+ ) -> Callable[_P, Generator[_Y, Any, None]]:
+ """Wrap a callable with retry behavior.
+
+ Args:
+ func (Callable): The callable to add retry behavior to.
+ on_error (Optional[Callable[Exception]]): If given, the
+ on_error callback will be called with each retryable exception
+ raised by the wrapped function. Any error raised by this
+ function will *not* be caught. If on_error was specified in the
+ constructor, this value will be ignored.
+
+ Returns:
+ Callable: A callable that will invoke ``func`` with retry
+ behavior.
+ """
+ if self._on_error is not None:
+ on_error = self._on_error
+
+ @functools.wraps(func)
+ def retry_wrapped_func(
+ *args: _P.args, **kwargs: _P.kwargs
+ ) -> Generator[_Y, Any, None]:
+ """A wrapper that calls target function with retry."""
+ sleep_generator = exponential_sleep_generator(
+ self._initial, self._maximum, multiplier=self._multiplier
+ )
+ return retry_target_stream(
+ func,
+ predicate=self._predicate,
+ sleep_generator=sleep_generator,
+ timeout=self._timeout,
+ on_error=on_error,
+ init_args=args,
+ init_kwargs=kwargs,
+ )
+
+ return retry_wrapped_func
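
A runnable sketch of the restart behavior described in the docstring above, using a toy predicate (real code would usually retry transient API errors):

    from google.api_core.retry import StreamingRetry, if_exception_type

    attempts = {"n": 0}

    @StreamingRetry(predicate=if_exception_type(ValueError), timeout=5.0)
    def flaky_stream():
        attempts["n"] += 1
        yield attempts["n"]
        if attempts["n"] < 3:
            raise ValueError("transient")  # the whole stream restarts on retry

    print(list(flaky_stream()))  # [1, 2, 3]: each retry re-runs from the start
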
diff --git a/google/api_core/retry/retry_streaming_async.py b/google/api_core/retry/retry_streaming_async.py
new file mode 100644
index 00000000..5e5fa240
--- /dev/null
+++ b/google/api_core/retry/retry_streaming_async.py
@@ -0,0 +1,328 @@
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Generator wrapper for retryable async streaming RPCs.
+"""
+from __future__ import annotations
+
+from typing import (
+ cast,
+ Any,
+ Callable,
+ Iterable,
+ AsyncIterator,
+ AsyncIterable,
+ Awaitable,
+ TypeVar,
+ AsyncGenerator,
+ TYPE_CHECKING,
+)
+
+import asyncio
+import time
+import sys
+import functools
+
+from google.api_core.retry.retry_base import _BaseRetry
+from google.api_core.retry.retry_base import _retry_error_helper
+from google.api_core.retry import exponential_sleep_generator
+from google.api_core.retry import build_retry_error
+from google.api_core.retry import RetryFailureReason
+
+
+if TYPE_CHECKING:
+ if sys.version_info >= (3, 10):
+ from typing import ParamSpec
+ else:
+ from typing_extensions import ParamSpec
+
+ _P = ParamSpec("_P") # target function call parameters
+ _Y = TypeVar("_Y") # yielded values
+
+
+async def retry_target_stream(
+ target: Callable[_P, AsyncIterable[_Y] | Awaitable[AsyncIterable[_Y]]],
+ predicate: Callable[[Exception], bool],
+ sleep_generator: Iterable[float],
+ timeout: float | None = None,
+ on_error: Callable[[Exception], None] | None = None,
+ exception_factory: Callable[
+ [list[Exception], RetryFailureReason, float | None],
+ tuple[Exception, Exception | None],
+ ] = build_retry_error,
+ init_args: tuple = (),
+ init_kwargs: dict = {},
+ **kwargs,
+) -> AsyncGenerator[_Y, None]:
+ """Create a generator wrapper that retries the wrapped stream if it fails.
+
+ This is the lowest-level retry helper. Generally, you'll use the
+ higher-level retry helper :class:`AsyncRetry`.
+
+ Args:
+ target: The generator function to call and retry.
+ predicate: A callable used to determine if an
+ exception raised by the target should be considered retryable.
+ It should return True to retry or False otherwise.
+ sleep_generator: An infinite iterator that determines
+ how long to sleep between retries.
+ timeout: How long to keep retrying the target.
+ Note: timeout is only checked before initiating a retry, so the target may
+ run past the timeout value as long as it is healthy.
+ on_error: If given, the on_error callback will be called with each
+ retryable exception raised by the target. Any error raised by this
+ function will *not* be caught.
+ exception_factory: A function that is called when the retryable reaches
+ a terminal failure state, used to construct an exception to be raised.
+ It takes a list of all exceptions encountered, a retry.RetryFailureReason
+ enum indicating the failure cause, and the original timeout value
+ as arguments. It should return a tuple of the exception to be raised,
+ along with the cause exception if any. The default implementation will raise
+ a RetryError on timeout, or the last exception encountered otherwise.
+ init_args: Positional arguments to pass to the target function.
+ init_kwargs: Keyword arguments to pass to the target function.
+
+ Returns:
+ AsyncGenerator: A retryable generator that wraps the target generator function.
+
+ Raises:
+ ValueError: If the sleep generator stops yielding values.
+ Exception: a custom exception specified by the exception_factory if provided.
+ If no exception_factory is provided:
+ google.api_core.RetryError: If the timeout is exceeded while retrying.
+ Exception: If the target raises an error that isn't retryable.
+ """
+ target_iterator: AsyncIterator[_Y] | None = None
+ timeout = kwargs.get("deadline", timeout)
+ deadline = time.monotonic() + timeout if timeout is not None else None
+ # keep track of retryable exceptions we encounter to pass in to exception_factory
+ error_list: list[Exception] = []
+ sleep_iter = iter(sleep_generator)
+ target_is_generator: bool | None = None
+
+ # continue trying until an attempt completes, or a terminal exception is raised in _retry_error_helper
+ # TODO: support max_attempts argument: https://github.com/googleapis/python-api-core/issues/535
+ while True:
+ # Start a new retry loop
+ try:
+ # Note: in the future, we can add a ResumptionStrategy object
+ # to generate new args between calls. For now, use the same args
+ # for each attempt.
+ target_output: AsyncIterable[_Y] | Awaitable[AsyncIterable[_Y]] = target(
+ *init_args, **init_kwargs
+ )
+ try:
+ # gapic functions return the generator behind an awaitable
+ # unwrap the awaitable so we can work with the generator directly
+ target_output = await target_output # type: ignore
+ except TypeError:
+ # was not awaitable, continue
+ pass
+ target_iterator = cast(AsyncIterable["_Y"], target_output).__aiter__()
+
+ if target_is_generator is None:
+ # Check if target supports generator features (asend, athrow, aclose)
+ target_is_generator = bool(getattr(target_iterator, "asend", None))
+
+ sent_in = None
+ while True:
+ ## Read from target_iterator
+ # If the target is a generator, we will advance it with `asend`
+ # otherwise, we will use `anext`
+ if target_is_generator:
+ next_value = await target_iterator.asend(sent_in) # type: ignore
+ else:
+ next_value = await target_iterator.__anext__()
+ ## Yield from Wrapper to caller
+ try:
+ # yield latest value from target
+ # exceptions from `athrow` and `aclose` are injected here
+ sent_in = yield next_value
+ except GeneratorExit:
+ # if wrapper received `aclose` while waiting on yield,
+ # it will raise GeneratorExit here
+ if target_is_generator:
+ # pass to inner target_iterator for handling
+ await cast(AsyncGenerator["_Y", None], target_iterator).aclose()
+ else:
+ raise
+ return
+ except: # noqa: E722
+ # bare except catches any exception passed to `athrow`
+ if target_is_generator:
+ # delegate error handling to target_iterator
+ await cast(AsyncGenerator["_Y", None], target_iterator).athrow(
+ cast(BaseException, sys.exc_info()[1])
+ )
+ else:
+ raise
+ return
+ except StopAsyncIteration:
+ # if iterator exhausted, return
+ return
+ # handle exceptions raised by the target_iterator
+ # pylint: disable=broad-except
+ # This function explicitly must deal with broad exceptions.
+ except Exception as exc:
+ # defer to shared logic for handling errors
+ next_sleep = _retry_error_helper(
+ exc,
+ deadline,
+ sleep_iter,
+ error_list,
+ predicate,
+ on_error,
+ exception_factory,
+ timeout,
+ )
+ # if exception not raised, sleep before next attempt
+ await asyncio.sleep(next_sleep)
+
+ finally:
+ if target_is_generator and target_iterator is not None:
+ await cast(AsyncGenerator["_Y", None], target_iterator).aclose()
+
+
+class AsyncStreamingRetry(_BaseRetry):
+ """Exponential retry decorator for async streaming rpcs.
+
+ This class returns an AsyncGenerator when called, which wraps the target
+ stream in retry logic. If any exception is raised by the target, the
+ entire stream will be retried within the wrapper.
+
+ Although the default behavior is to retry transient API errors, a
+ different predicate can be provided to retry other exceptions.
+
+ Important Note: when a stream encounters a retryable error, it will
+ silently construct a fresh iterator instance in the background
+ and continue yielding (likely duplicate) values as if no error occurred.
+ This is the most general way to retry a stream, but it often is not the
+ desired behavior. Example: iter([1, 2, 1/0]) -> [1, 2, 1, 2, ...]
+
+ There are two ways to build more advanced retry logic for streams:
+
+ 1. Wrap the target
+ Use a ``target`` that maintains state between retries, and creates a
+ different generator on each retry call. For example, you can wrap a
+ grpc call in a function that modifies the request based on what has
+ already been returned:
+
+ .. code-block:: python
+
+ async def attempt_with_modified_request(target, request, seen_items=[]):
+ # remove seen items from request on each attempt
+ new_request = modify_request(request, seen_items)
+ new_generator = await target(new_request)
+ async for item in new_generator:
+ yield item
+ seen_items.append(item)
+
+ retry_wrapped_fn = AsyncRetry(is_stream=True, ...)(attempt_with_modified_request)
+ retryable_generator = await retry_wrapped_fn(target, request)
+
+ 2. Wrap the retry generator
+ Alternatively, you can wrap the retryable generator itself before
+ passing it to the end-user to add a filter on the stream. For
+ example, you can keep track of the items that were successfully yielded
+ in previous retry attempts, and only yield new items when the
+ new attempt surpasses the previous ones:
+
+ .. code-block:: python
+
+ async def retryable_with_filter(target):
+ stream_idx = 0
+ # reset stream_idx when the stream is retried
+ def on_error(e):
+ nonlocal stream_idx
+ stream_idx = 0
+ # build retryable
+ retryable_gen = AsyncRetry(is_stream=True, ...)(target)
+ # keep track of what has been yielded out of filter
+ seen_items = []
+ async for item in await retryable_gen():
+ if stream_idx >= len(seen_items):
+ yield item
+ seen_items.append(item)
+ elif item != seen_items[stream_idx]:
+ raise ValueError("Stream differs from last attempt")
+ stream_idx += 1
+
+ filter_retry_wrapped = retryable_with_filter(target)
+
+ Args:
+ predicate (Callable[Exception]): A callable that should return ``True``
+ if the given exception is retryable.
+ initial (float): The minimum amount of time to delay in seconds. This
+ must be greater than 0.
+ maximum (float): The maximum amount of time to delay in seconds.
+ multiplier (float): The multiplier applied to the delay.
+ timeout (Optional[float]): How long to keep retrying in seconds.
+ Note: timeout is only checked before initiating a retry, so the target may
+ run past the timeout value as long as it is healthy.
+ on_error (Optional[Callable[Exception]]): A function to call while processing
+ a retryable exception. Any error raised by this function will
+ *not* be caught.
+ is_stream (bool): Indicates whether the input function
+ should be treated as a stream function (i.e. an AsyncGenerator,
+ or function or coroutine that returns an AsyncIterable).
+ If True, the iterable will be wrapped with retry logic, and any
+ failed outputs will restart the stream. If False, only the input
+ function call itself will be retried. Defaults to False.
+ To avoid duplicate values, retryable streams should typically be
+ wrapped in additional filter logic before use.
+ deadline (float): DEPRECATED: use ``timeout`` instead. If set, it will
+ override the ``timeout`` parameter.
+ """
+
+ def __call__(
+ self,
+ func: Callable[..., AsyncIterable[_Y] | Awaitable[AsyncIterable[_Y]]],
+ on_error: Callable[[Exception], Any] | None = None,
+ ) -> Callable[_P, Awaitable[AsyncGenerator[_Y, None]]]:
+ """Wrap a callable with retry behavior.
+
+ Args:
+ func (Callable): The callable or stream to add retry behavior to.
+ on_error (Optional[Callable[Exception]]): If given, the
+ on_error callback will be called with each retryable exception
+ raised by the wrapped function. Any error raised by this
+ function will *not* be caught. If on_error was specified in the
+ constructor, this value will be ignored.
+
+ Returns:
+ Callable: A callable that will invoke ``func`` with retry
+ behavior.
+ """
+ if self._on_error is not None:
+ on_error = self._on_error
+
+ @functools.wraps(func)
+ async def retry_wrapped_func(
+ *args: _P.args, **kwargs: _P.kwargs
+ ) -> AsyncGenerator[_Y, None]:
+ """A wrapper that calls target function with retry."""
+ sleep_generator = exponential_sleep_generator(
+ self._initial, self._maximum, multiplier=self._multiplier
+ )
+ return retry_target_stream(
+ func,
+ self._predicate,
+ sleep_generator,
+ self._timeout,
+ on_error,
+ init_args=args,
+ init_kwargs=kwargs,
+ )
+
+ return retry_wrapped_func
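
The async variant behaves the same way, with one wrinkle: the wrapper returned by AsyncStreamingRetry.__call__ is awaited to obtain the retryable generator. A runnable sketch:

    import asyncio

    from google.api_core.retry import AsyncStreamingRetry, if_exception_type

    attempts = {"n": 0}

    @AsyncStreamingRetry(predicate=if_exception_type(ValueError), timeout=5.0)
    async def flaky_stream():
        attempts["n"] += 1
        yield attempts["n"]
        if attempts["n"] < 2:
            raise ValueError("transient")  # restarts the stream

    async def main():
        gen = await flaky_stream()  # awaiting the wrapper yields the generator
        print([item async for item in gen])  # [1, 2]

    asyncio.run(main())
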
diff --git a/google/api_core/retry/retry_unary.py b/google/api_core/retry/retry_unary.py
new file mode 100644
index 00000000..6d36bc7d
--- /dev/null
+++ b/google/api_core/retry/retry_unary.py
@@ -0,0 +1,302 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helpers for retrying functions with exponential back-off.
+
+The :class:`Retry` decorator can be used to retry functions that raise
+exceptions using exponential backoff. Because an exponential sleep algorithm is
+used, the retry is limited by a `timeout`. The timeout determines the window
+in which retries will be attempted. This is used instead of total number of retries
+because it is difficult to ascertain the amount of time a function can block
+when using total number of retries and exponential backoff.
+
+By default, this decorator will retry transient
+API errors (see :func:`if_transient_error`). For example:
+
+.. code-block:: python
+
+ @retry.Retry()
+ def call_flaky_rpc():
+ return client.flaky_rpc()
+
+ # Will retry flaky_rpc() if it raises transient API errors.
+ result = call_flaky_rpc()
+
+You can pass a custom predicate to retry on different exceptions, such as
+waiting for an eventually consistent item to be available:
+
+.. code-block:: python
+
+ @retry.Retry(predicate=if_exception_type(exceptions.NotFound))
+ def check_if_exists():
+ return client.does_thing_exist()
+
+ is_available = check_if_exists()
+
+Some client library methods apply retry automatically. These methods can accept
+a ``retry`` parameter that allows you to configure the behavior:
+
+.. code-block:: python
+
+ my_retry = retry.Retry(timeout=60)
+ result = client.some_method(retry=my_retry)
+
+"""
+
+from __future__ import annotations
+
+import functools
+import sys
+import time
+import inspect
+import warnings
+from typing import Any, Callable, Iterable, TypeVar, TYPE_CHECKING
+
+from google.api_core.retry.retry_base import _BaseRetry
+from google.api_core.retry.retry_base import _retry_error_helper
+from google.api_core.retry.retry_base import exponential_sleep_generator
+from google.api_core.retry.retry_base import build_retry_error
+from google.api_core.retry.retry_base import RetryFailureReason
+
+
+if TYPE_CHECKING:
+ if sys.version_info >= (3, 10):
+ from typing import ParamSpec
+ else:
+ from typing_extensions import ParamSpec
+
+ _P = ParamSpec("_P") # target function call parameters
+ _R = TypeVar("_R") # target function returned value
+
+_ASYNC_RETRY_WARNING = "Using the synchronous google.api_core.retry.Retry with asynchronous calls may lead to unexpected results. Please use google.api_core.retry_async.AsyncRetry instead."
+
+
+def retry_target(
+ target: Callable[[], _R],
+ predicate: Callable[[Exception], bool],
+ sleep_generator: Iterable[float],
+ timeout: float | None = None,
+ on_error: Callable[[Exception], None] | None = None,
+ exception_factory: Callable[
+ [list[Exception], RetryFailureReason, float | None],
+ tuple[Exception, Exception | None],
+ ] = build_retry_error,
+ **kwargs,
+):
+ """Call a function and retry if it fails.
+
+ This is the lowest-level retry helper. Generally, you'll use the
+ higher-level retry helper :class:`Retry`.
+
+ Args:
+ target (Callable): The function to call and retry. This must be a
+ nullary function - apply arguments with `functools.partial`.
+ predicate (Callable[Exception]): A callable used to determine if an
+ exception raised by the target should be considered retryable.
+ It should return True to retry or False otherwise.
+ sleep_generator (Iterable[float]): An infinite iterator that determines
+ how long to sleep between retries.
+ timeout (Optional[float]): How long to keep retrying the target.
+ Note: timeout is only checked before initiating a retry, so the target may
+ run past the timeout value as long as it is healthy.
+ on_error (Optional[Callable[Exception]]): If given, the on_error
+ callback will be called with each retryable exception raised by the
+ target. Any error raised by this function will *not* be caught.
+ exception_factory: A function that is called when the retry reaches
+ a terminal failure state, used to construct the exception to be raised.
+ It takes a list of all exceptions encountered, a retry.RetryFailureReason
+ enum indicating the failure cause, and the original timeout value
+ as arguments. It should return a tuple of the exception to be raised,
+ along with the cause exception if any. The default implementation will raise
+ a RetryError on timeout, or the last exception encountered otherwise.
+ deadline (float): DEPRECATED: use ``timeout`` instead. For backward
+ compatibility, if specified it will override the ``timeout`` parameter.
+
+ Returns:
+ Any: the return value of the target function.
+
+ Raises:
+ ValueError: If the sleep generator stops yielding values.
+ Exception: a custom exception specified by the exception_factory if provided.
+ If no exception_factory is provided:
+ google.api_core.RetryError: If the timeout is exceeded while retrying.
+ Exception: If the target raises an error that isn't retryable.
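+
+ Example (a minimal sketch; ``flaky_call`` and ``some_arg`` are hypothetical
+ stand-ins for the caller's own function and arguments):
+
+ .. code-block:: python
+
+ import functools
+
+ from google.api_core.retry import exponential_sleep_generator
+ from google.api_core.retry import if_transient_error
+
+ target = functools.partial(flaky_call, some_arg)
+ sleep_generator = exponential_sleep_generator(1.0, 60.0, multiplier=2.0)
+ result = retry_target(target, if_transient_error, sleep_generator, timeout=120)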
+ """
+
+ timeout = kwargs.get("deadline", timeout)
+
+ deadline = time.monotonic() + timeout if timeout is not None else None
+ error_list: list[Exception] = []
+ sleep_iter = iter(sleep_generator)
+
+ # continue trying until an attempt completes, or a terminal exception is raised in _retry_error_helper
+ # TODO: support max_attempts argument: https://github.com/googleapis/python-api-core/issues/535
+ while True:
+ try:
+ result = target()
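+ # If the target returned an awaitable, the caller wrapped a coroutine
+ # function with the synchronous Retry; warn that retries will not work
+ # as expected, but still hand the awaitable back to the caller.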
+ if inspect.isawaitable(result):
+ warnings.warn(_ASYNC_RETRY_WARNING)
+ return result
+
+ # pylint: disable=broad-except
+ # This function explicitly must deal with broad exceptions.
+ except Exception as exc:
+ # defer to shared logic for handling errors
+ next_sleep = _retry_error_helper(
+ exc,
+ deadline,
+ sleep_iter,
+ error_list,
+ predicate,
+ on_error,
+ exception_factory,
+ timeout,
+ )
+ # if exception not raised, sleep before next attempt
+ time.sleep(next_sleep)
+
+
+class Retry(_BaseRetry):
+ """Exponential retry decorator for unary synchronous RPCs.
+
+ This class is a decorator used to add retry or polling behavior to an RPC
+ call.
+
+ Although the default behavior is to retry transient API errors, a
+ different predicate can be provided to retry other exceptions.
+
+ Retry and polling behavior are defined in terms of two concepts, Deadline
+ and Timeout, which must be clearly distinguished for correct usage of this
+ class and the rest of the library.
+
+ Deadline: a fixed point in time by which a certain operation must
+ terminate. For example, if a certain operation has a deadline
+ "2022-10-18T23:30:52.123Z" it must terminate (successfully or with an
+ error) by that time, regardless of when it was started or whether it
+ was started at all.
+
+ Timeout: the maximum duration of time after which a certain operation
+ must terminate (successfully or with an error). The countdown begins right
+ after an operation was started. For example, if an operation was started at
+ 09:24:00 with timeout of 75 seconds, it must terminate no later than
+ 09:25:15.
+
+ Unfortunately, in the past this class (and the api-core library as a whole) did
+ not properly distinguish the concepts of "timeout" and "deadline": the
+ ``deadline`` parameter actually meant ``timeout``. That is why ``deadline``
+ has been deprecated and ``timeout`` should be used instead. If the
+ ``deadline`` parameter is set, it overrides the ``timeout`` parameter.
+ In other words, ``retry.deadline`` should be treated as just a deprecated
+ alias for ``retry.timeout``.
+
+ Said another way, it is safe to assume that this class and the rest of this
+ library operate in terms of timeouts (not deadlines) unless deadline
+ semantics are explicitly noted.
+
+ It is also important to
+ understand the three most common applications of the Timeout concept in the
+ context of this library.
+
+ Usually the generic Timeout term may stand for one of the following actual
+ timeouts: RPC Timeout, Retry Timeout, or Polling Timeout.
+
+ RPC Timeout: a value supplied by the client to the server so
+ that the server side knows the maximum amount of time it is expected to
+ spend handling that specific RPC. For example, in the case of gRPC transport,
+ RPC Timeout is represented by setting "grpc-timeout" header in the HTTP2
+ request. The `timeout` property of this class normally never represents the
+ RPC Timeout as it is handled separately by the ``google.api_core.timeout``
+ module of this library.
+
+ Retry Timeout: this is the most common meaning of the ``timeout`` property
+ of this class, and defines how long a certain RPC may be retried in case
+ the server returns an error.
+
+ Polling Timeout: defines how long the client side is allowed to call the
+ polling RPC repeatedly to check the status of a long-running operation.
+ Each polling RPC is expected to succeed (its errors are supposed to be
+ handled by the retry logic). The decision as to whether a new polling
+ attempt needs to be made is based not on the RPC status code but on the
+ status of the returned operation. In other words: we will poll a
+ long-running operation until the operation is done or the polling timeout
+ expires. Each poll informs us of the status of the operation. The poll
+ consists of an RPC to the server that may itself be retried as per the
+ poll-specific retry settings in case of errors. The operation-level retry
+ settings do NOT apply to polling-RPC retries.
+
+ With the actual timeout types defined above, the client libraries often
+ refer to just Timeout without clarifying which type specifically that is.
+ In that case the actual timeout type (sometimes also referred to as
+ Logical Timeout) can be determined from the context. If it is a unary RPC
+ call (i.e. a regular one), Timeout usually stands for the RPC Timeout (if
+ provided directly as a standalone value) or the Retry Timeout (if provided
+ as the ``retry.timeout`` property of the unary RPC's retry config). For
+ ``Operation`` or ``PollingFuture`` in general, Timeout stands for the
+ Polling Timeout.
+
+ Args:
+ predicate (Callable[Exception]): A callable that should return ``True``
+ if the given exception is retryable.
+ initial (float): The minimum amount of time to delay in seconds. This
+ must be greater than 0.
+ maximum (float): The maximum amount of time to delay in seconds.
+ multiplier (float): The multiplier applied to the delay.
+ timeout (Optional[float]): How long to keep retrying, in seconds.
+ Note: timeout is only checked before initiating a retry, so the target may
+ run past the timeout value as long as it is healthy.
+ on_error (Callable[Exception]): A function to call while processing
+ a retryable exception. Any error raised by this function will
+ *not* be caught.
+ deadline (float): DEPRECATED: use `timeout` instead. For backward
+ compatibility, if specified it will override the ``timeout`` parameter.
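+
+ Example (a minimal sketch; ``client.some_method`` is a hypothetical RPC):
+
+ .. code-block:: python
+
+ from google.api_core import exceptions
+ from google.api_core.retry import Retry, if_exception_type
+
+ my_retry = Retry(
+ predicate=if_exception_type(exceptions.ServiceUnavailable),
+ initial=1.0, # first delay, in seconds
+ maximum=10.0, # cap applied to any single delay
+ multiplier=2.0, # growth factor for successive delays
+ timeout=60.0, # Retry Timeout: total window for all attempts
+ )
+ result = client.some_method(retry=my_retry)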
+ """
+
+ def __call__(
+ self,
+ func: Callable[_P, _R],
+ on_error: Callable[[Exception], Any] | None = None,
+ ) -> Callable[_P, _R]:
+ """Wrap a callable with retry behavior.
+
+ Args:
+ func (Callable): The callable to add retry behavior to.
+ on_error (Optional[Callable[Exception]]): If given, the
+ on_error callback will be called with each retryable exception
+ raised by the wrapped function. Any error raised by this
+ function will *not* be caught. If on_error was specified in the
+ constructor, this value will be ignored.
+
+ Returns:
+ Callable: A callable that will invoke ``func`` with retry
+ behavior.
+ """
+ if self._on_error is not None:
+ on_error = self._on_error
+
+ @functools.wraps(func)
+ def retry_wrapped_func(*args: _P.args, **kwargs: _P.kwargs) -> _R:
+ """A wrapper that calls target function with retry."""
+ target = functools.partial(func, *args, **kwargs)
+ sleep_generator = exponential_sleep_generator(
+ self._initial, self._maximum, multiplier=self._multiplier
+ )
+ return retry_target(
+ target,
+ self._predicate,
+ sleep_generator,
+ timeout=self._timeout,
+ on_error=on_error,
+ )
+
+ return retry_wrapped_func
diff --git a/google/api_core/retry/retry_unary_async.py b/google/api_core/retry/retry_unary_async.py
new file mode 100644
index 00000000..1f72476a
--- /dev/null
+++ b/google/api_core/retry/retry_unary_async.py
@@ -0,0 +1,239 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helpers for retrying coroutine functions with exponential back-off.
+
+The :class:`AsyncRetry` decorator shares most functionality and behavior with
+:class:`Retry`, but supports coroutine functions. Please refer to description
+of :class:`Retry` for more details.
+
+By default, this decorator will retry transient
+API errors (see :func:`if_transient_error`). For example:
+
+.. code-block:: python
+
+ @retry_async.AsyncRetry()
+ async def call_flaky_rpc():
+ return await client.flaky_rpc()
+
+ # Will retry flaky_rpc() if it raises transient API errors.
+ result = await call_flaky_rpc()
+
+You can pass a custom predicate to retry on different exceptions, such as
+waiting for an eventually consistent item to be available:
+
+.. code-block:: python
+
+ @retry_async.AsyncRetry(predicate=retry_async.if_exception_type(exceptions.NotFound))
+ async def check_if_exists():
+ return await client.does_thing_exist()
+
+ is_available = await check_if_exists()
+
+Some client library methods apply retry automatically. These methods can accept
+a ``retry`` parameter that allows you to configure the behavior:
+
+.. code-block:: python
+
+ my_retry = retry_async.AsyncRetry(timeout=60)
+ result = await client.some_method(retry=my_retry)
+
+"""
+
+from __future__ import annotations
+
+import asyncio
+import time
+import functools
+from typing import (
+ Awaitable,
+ Any,
+ Callable,
+ Iterable,
+ TypeVar,
+ TYPE_CHECKING,
+)
+
+from google.api_core.retry.retry_base import _BaseRetry
+from google.api_core.retry.retry_base import _retry_error_helper
+from google.api_core.retry.retry_base import exponential_sleep_generator
+from google.api_core.retry.retry_base import build_retry_error
+from google.api_core.retry.retry_base import RetryFailureReason
+
+# for backwards compatibility, expose helpers in this module
+from google.api_core.retry.retry_base import if_exception_type # noqa
+from google.api_core.retry.retry_base import if_transient_error # noqa
+
+if TYPE_CHECKING:
+ import sys
+
+ if sys.version_info >= (3, 10):
+ from typing import ParamSpec
+ else:
+ from typing_extensions import ParamSpec
+
+ _P = ParamSpec("_P") # target function call parameters
+ _R = TypeVar("_R") # target function returned value
+
+_DEFAULT_INITIAL_DELAY = 1.0 # seconds
+_DEFAULT_MAXIMUM_DELAY = 60.0 # seconds
+_DEFAULT_DELAY_MULTIPLIER = 2.0
+_DEFAULT_DEADLINE = 60.0 * 2.0 # seconds
+_DEFAULT_TIMEOUT = 60.0 * 2.0 # seconds
+
+
+async def retry_target(
+ target: Callable[[], Awaitable[_R]],
+ predicate: Callable[[Exception], bool],
+ sleep_generator: Iterable[float],
+ timeout: float | None = None,
+ on_error: Callable[[Exception], None] | None = None,
+ exception_factory: Callable[
+ [list[Exception], RetryFailureReason, float | None],
+ tuple[Exception, Exception | None],
+ ] = build_retry_error,
+ **kwargs,
+):
+ """Await a coroutine and retry if it fails.
+
+ This is the lowest-level retry helper. Generally, you'll use the
+ higher-level retry helper :class:`AsyncRetry`.
+
+ Args:
+ target (Callable[[], Awaitable[Any]]): The function to call and retry.
+ This must be a nullary function - apply arguments with
+ `functools.partial`.
+ predicate (Callable[Exception]): A callable used to determine if an
+ exception raised by the target should be considered retryable.
+ It should return True to retry or False otherwise.
+ sleep_generator (Iterable[float]): An infinite iterator that determines
+ how long to sleep between retries.
+ timeout (Optional[float]): How long to keep retrying the target, in seconds.
+ Note: timeout is only checked before initiating a retry, so the target may
+ run past the timeout value as long as it is healthy.
+ on_error (Optional[Callable[Exception]]): If given, the on_error
+ callback will be called with each retryable exception raised by the
+ target. Any error raised by this function will *not* be caught.
+ exception_factory: A function that is called when the retryable reaches
+ a terminal failure state, used to construct an exception to be raised.
+ It takes a list of all exceptions encountered, a retry.RetryFailureReason
+ enum indicating the failure cause, and the original timeout value
+ as arguments. It should return a tuple of the exception to be raised,
+ along with the cause exception if any. The default implementation will raise
+ a RetryError on timeout, or the last exception encountered otherwise.
+ deadline (float): DEPRECATED: use ``timeout`` instead. For backward
+ compatibility, if set it will override the ``timeout`` parameter.
+
+ Returns:
+ Any: the return value of the target function.
+
+ Raises:
+ ValueError: If the sleep generator stops yielding values.
+ Exception: a custom exception specified by the exception_factory if provided.
+ If no exception_factory is provided:
+ google.api_core.RetryError: If the timeout is exceeded while retrying.
+ Exception: If the target raises an error that isn't retryable.
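+
+ Example (a minimal sketch; ``flaky_coro`` and ``some_arg`` are hypothetical):
+
+ .. code-block:: python
+
+ import functools
+
+ from google.api_core import retry_async
+
+ target = functools.partial(flaky_coro, some_arg)
+ sleep_generator = retry_async.exponential_sleep_generator(1.0, 60.0)
+ result = await retry_async.retry_target(
+ target, retry_async.if_transient_error, sleep_generator, timeout=120
+ )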
+ """
+
+ timeout = kwargs.get("deadline", timeout)
+
+ deadline = time.monotonic() + timeout if timeout is not None else None
+ error_list: list[Exception] = []
+ sleep_iter = iter(sleep_generator)
+
+ # continue trying until an attempt completes, or a terminal exception is raised in _retry_error_helper
+ # TODO: support max_attempts argument: https://github.com/googleapis/python-api-core/issues/535
+ while True:
+ try:
+ return await target()
+ # pylint: disable=broad-except
+ # This function explicitly must deal with broad exceptions.
+ except Exception as exc:
+ # defer to shared logic for handling errors
+ next_sleep = _retry_error_helper(
+ exc,
+ deadline,
+ sleep_iter,
+ error_list,
+ predicate,
+ on_error,
+ exception_factory,
+ timeout,
+ )
+ # if exception not raised, sleep before next attempt
+ await asyncio.sleep(next_sleep)
+
+
+class AsyncRetry(_BaseRetry):
+ """Exponential retry decorator for async coroutines.
+
+ This class is a decorator used to add exponential back-off retry behavior
+ to an RPC call.
+
+ Although the default behavior is to retry transient API errors, a
+ different predicate can be provided to retry other exceptions.
+
+ Args:
+ predicate (Callable[Exception]): A callable that should return ``True``
+ if the given exception is retryable.
+ initial (float): The minimum amount of time to delay in seconds. This
+ must be greater than 0.
+ maximum (float): The maximum amount of time to delay in seconds.
+ multiplier (float): The multiplier applied to the delay.
+ timeout (Optional[float]): How long to keep retrying in seconds.
+ Note: timeout is only checked before initiating a retry, so the target may
+ run past the timeout value as long as it is healthy.
+ on_error (Optional[Callable[Exception]]): A function to call while processing
+ a retryable exception. Any error raised by this function will
+ *not* be caught.
+ deadline (float): DEPRECATED: use ``timeout`` instead. If set, it will
+ override the ``timeout`` parameter.
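+
+ Example (a minimal sketch; ``client.flaky_rpc`` is a hypothetical coroutine):
+
+ .. code-block:: python
+
+ my_retry = AsyncRetry(timeout=60.0, on_error=print)
+
+ @my_retry
+ async def call_rpc():
+ return await client.flaky_rpc()
+
+ result = await call_rpc()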
+ """
+
+ def __call__(
+ self,
+ func: Callable[..., Awaitable[_R]],
+ on_error: Callable[[Exception], Any] | None = None,
+ ) -> Callable[_P, Awaitable[_R]]:
+ """Wrap a callable with retry behavior.
+
+ Args:
+ func (Callable): The coroutine function to add retry behavior to.
+ on_error (Optional[Callable[Exception]]): If given, the
+ on_error callback will be called with each retryable exception
+ raised by the wrapped function. Any error raised by this
+ function will *not* be caught. If on_error was specified in the
+ constructor, this value will be ignored.
+
+ Returns:
+ Callable: A callable that will invoke ``func`` with retry
+ behavior.
+ """
+ if self._on_error is not None:
+ on_error = self._on_error
+
+ @functools.wraps(func)
+ async def retry_wrapped_func(*args: _P.args, **kwargs: _P.kwargs) -> _R:
+ """A wrapper that calls target function with retry."""
+ sleep_generator = exponential_sleep_generator(
+ self._initial, self._maximum, multiplier=self._multiplier
+ )
+ return await retry_target(
+ functools.partial(func, *args, **kwargs),
+ predicate=self._predicate,
+ sleep_generator=sleep_generator,
+ timeout=self._timeout,
+ on_error=on_error,
+ )
+
+ return retry_wrapped_func
diff --git a/google/api_core/retry_async.py b/google/api_core/retry_async.py
index f925c3d3..90a2d5ad 100644
--- a/google/api_core/retry_async.py
+++ b/google/api_core/retry_async.py
@@ -1,4 +1,4 @@
-# Copyright 2020 Google LLC
+# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -11,272 +11,24 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
-"""Helpers for retrying coroutine functions with exponential back-off.
-
-The :class:`AsyncRetry` decorator shares most functionality and behavior with
-:class:`Retry`, but supports coroutine functions. Please refer to description
-of :class:`Retry` for more details.
-
-By default, this decorator will retry transient
-API errors (see :func:`if_transient_error`). For example:
-
-.. code-block:: python
-
- @retry_async.AsyncRetry()
- async def call_flaky_rpc():
- return await client.flaky_rpc()
-
- # Will retry flaky_rpc() if it raises transient API errors.
- result = await call_flaky_rpc()
-
-You can pass a custom predicate to retry on different exceptions, such as
-waiting for an eventually consistent item to be available:
-
-.. code-block:: python
-
- @retry_async.AsyncRetry(predicate=retry_async.if_exception_type(exceptions.NotFound))
- async def check_if_exists():
- return await client.does_thing_exist()
-
- is_available = await check_if_exists()
-
-Some client library methods apply retry automatically. These methods can accept
-a ``retry`` parameter that allows you to configure the behavior:
-
-.. code-block:: python
-
- my_retry = retry_async.AsyncRetry(deadline=60)
- result = await client.some_method(retry=my_retry)
-
-"""
-
-import asyncio
-import datetime
-import functools
-import logging
-
-from google.api_core import datetime_helpers, exceptions
-from google.api_core.retry import (exponential_sleep_generator, # noqa: F401
- if_exception_type, if_transient_error)
-
-_LOGGER = logging.getLogger(__name__)
-_DEFAULT_INITIAL_DELAY = 1.0 # seconds
-_DEFAULT_MAXIMUM_DELAY = 60.0 # seconds
-_DEFAULT_DELAY_MULTIPLIER = 2.0
-_DEFAULT_DEADLINE = 60.0 * 2.0 # seconds
-
-
-async def retry_target(target, predicate, sleep_generator, deadline, on_error=None):
- """Call a function and retry if it fails.
-
- This is the lowest-level retry helper. Generally, you'll use the
- higher-level retry helper :class:`Retry`.
-
- Args:
- target(Callable): The function to call and retry. This must be a
- nullary function - apply arguments with `functools.partial`.
- predicate (Callable[Exception]): A callable used to determine if an
- exception raised by the target should be considered retryable.
- It should return True to retry or False otherwise.
- sleep_generator (Iterable[float]): An infinite iterator that determines
- how long to sleep between retries.
- deadline (float): How long to keep retrying the target. The last sleep
- period is shortened as necessary, so that the last retry runs at
- ``deadline`` (and not considerably beyond it).
- on_error (Callable[Exception]): A function to call while processing a
- retryable exception. Any error raised by this function will *not*
- be caught.
-
- Returns:
- Any: the return value of the target function.
-
- Raises:
- google.api_core.RetryError: If the deadline is exceeded while retrying.
- ValueError: If the sleep generator stops yielding values.
- Exception: If the target raises a method that isn't retryable.
- """
- deadline_dt = (datetime_helpers.utcnow() + datetime.timedelta(seconds=deadline)) if deadline else None
-
- last_exc = None
-
- for sleep in sleep_generator:
- try:
- if not deadline_dt:
- return await target()
- else:
- return await asyncio.wait_for(
- target(),
- timeout=(deadline_dt - datetime_helpers.utcnow()).total_seconds()
- )
- # pylint: disable=broad-except
- # This function explicitly must deal with broad exceptions.
- except Exception as exc:
- if not predicate(exc) and not isinstance(exc, asyncio.TimeoutError):
- raise
- last_exc = exc
- if on_error is not None:
- on_error(exc)
-
- now = datetime_helpers.utcnow()
-
- if deadline_dt:
- if deadline_dt <= now:
- # Chains the raising RetryError with the root cause error,
- # which helps observability and debugability.
- raise exceptions.RetryError(
- "Deadline of {:.1f}s exceeded while calling {}".format(
- deadline, target
- ),
- last_exc,
- ) from last_exc
- else:
- time_to_deadline = (deadline_dt - now).total_seconds()
- sleep = min(time_to_deadline, sleep)
-
- _LOGGER.debug(
- "Retrying due to {}, sleeping {:.1f}s ...".format(last_exc, sleep)
- )
- await asyncio.sleep(sleep)
-
- raise ValueError("Sleep generator stopped yielding sleep values.")
-
-
-class AsyncRetry:
- """Exponential retry decorator for async functions.
-
- This class is a decorator used to add exponential back-off retry behavior
- to an RPC call.
-
- Although the default behavior is to retry transient API errors, a
- different predicate can be provided to retry other exceptions.
-
- Args:
- predicate (Callable[Exception]): A callable that should return ``True``
- if the given exception is retryable.
- initial (float): The minimum a,out of time to delay in seconds. This
- must be greater than 0.
- maximum (float): The maximum amout of time to delay in seconds.
- multiplier (float): The multiplier applied to the delay.
- deadline (float): How long to keep retrying in seconds. The last sleep
- period is shortened as necessary, so that the last retry runs at
- ``deadline`` (and not considerably beyond it).
- on_error (Callable[Exception]): A function to call while processing
- a retryable exception. Any error raised by this function will
- *not* be caught.
- """
-
- def __init__(
- self,
- predicate=if_transient_error,
- initial=_DEFAULT_INITIAL_DELAY,
- maximum=_DEFAULT_MAXIMUM_DELAY,
- multiplier=_DEFAULT_DELAY_MULTIPLIER,
- deadline=_DEFAULT_DEADLINE,
- on_error=None,
- ):
- self._predicate = predicate
- self._initial = initial
- self._multiplier = multiplier
- self._maximum = maximum
- self._deadline = deadline
- self._on_error = on_error
-
- def __call__(self, func, on_error=None):
- """Wrap a callable with retry behavior.
-
- Args:
- func (Callable): The callable to add retry behavior to.
- on_error (Callable[Exception]): A function to call while processing
- a retryable exception. Any error raised by this function will
- *not* be caught.
-
- Returns:
- Callable: A callable that will invoke ``func`` with retry
- behavior.
- """
- if self._on_error is not None:
- on_error = self._on_error
-
- @functools.wraps(func)
- async def retry_wrapped_func(*args, **kwargs):
- """A wrapper that calls target function with retry."""
- target = functools.partial(func, *args, **kwargs)
- sleep_generator = exponential_sleep_generator(
- self._initial, self._maximum, multiplier=self._multiplier
- )
- return await retry_target(
- target,
- self._predicate,
- sleep_generator,
- self._deadline,
- on_error=on_error,
- )
-
- return retry_wrapped_func
-
- def _replace(self,
- predicate=None,
- initial=None,
- maximum=None,
- multiplier=None,
- deadline=None,
- on_error=None):
- return AsyncRetry(
- predicate=predicate or self._predicate,
- initial=initial or self._initial,
- maximum=maximum or self._maximum,
- multiplier=multiplier or self._multiplier,
- deadline=deadline or self._deadline,
- on_error=on_error or self._on_error,
- )
-
- def with_deadline(self, deadline):
- """Return a copy of this retry with the given deadline.
-
- Args:
- deadline (float): How long to keep retrying.
-
- Returns:
- AsyncRetry: A new retry instance with the given deadline.
- """
- return self._replace(deadline=deadline)
-
- def with_predicate(self, predicate):
- """Return a copy of this retry with the given predicate.
-
- Args:
- predicate (Callable[Exception]): A callable that should return
- ``True`` if the given exception is retryable.
-
- Returns:
- AsyncRetry: A new retry instance with the given predicate.
- """
- return self._replace(predicate=predicate)
-
- def with_delay(self, initial=None, maximum=None, multiplier=None):
- """Return a copy of this retry with the given delay options.
-
- Args:
- initial (float): The minimum amout of time to delay. This must
- be greater than 0.
- maximum (float): The maximum amout of time to delay.
- multiplier (float): The multiplier applied to the delay.
-
- Returns:
- AsyncRetry: A new retry instance with the given predicate.
- """
- return self._replace(initial=initial, maximum=maximum, multiplier=multiplier)
-
- def __str__(self):
- return (
- "".format(
- self._predicate,
- self._initial,
- self._maximum,
- self._multiplier,
- self._deadline,
- self._on_error,
- )
- )
+#
+# The following imports are for backwards compatibility with https://github.com/googleapis/python-api-core/blob/4d7d2edee2c108d43deb151e6e0fdceb56b73275/google/api_core/retry_async.py
+#
+# TODO: Revert these imports on the next major version release (https://github.com/googleapis/python-api-core/issues/576)
+from google.api_core import datetime_helpers # noqa: F401
+from google.api_core import exceptions # noqa: F401
+from google.api_core.retry import exponential_sleep_generator # noqa: F401
+from google.api_core.retry import if_exception_type # noqa: F401
+from google.api_core.retry import if_transient_error # noqa: F401
+from google.api_core.retry.retry_unary_async import AsyncRetry
+from google.api_core.retry.retry_unary_async import retry_target
+
+__all__ = (
+ "AsyncRetry",
+ "datetime_helpers",
+ "exceptions",
+ "exponential_sleep_generator",
+ "if_exception_type",
+ "if_transient_error",
+ "retry_target",
+)
diff --git a/google/api_core/timeout.py b/google/api_core/timeout.py
index 17c1beab..55b195e9 100644
--- a/google/api_core/timeout.py
+++ b/google/api_core/timeout.py
@@ -14,8 +14,9 @@
"""Decorators for applying timeout arguments to functions.
-These decorators are used to wrap API methods to apply either a constant
-or exponential timeout argument.
+These decorators are used to wrap API methods to apply either a
+Deadline-dependent (recommended), constant (DEPRECATED) or exponential
+(DEPRECATED) timeout argument.
For example, imagine an API method that can take a while to return results,
such as one that might block until a resource is ready:
@@ -54,11 +55,9 @@ def is_thing_ready(timeout=None):
from __future__ import unicode_literals
import datetime
-
-import six
+import functools
from google.api_core import datetime_helpers
-from google.api_core import general_helpers
_DEFAULT_INITIAL_TIMEOUT = 5.0 # seconds
_DEFAULT_MAXIMUM_TIMEOUT = 30.0 # seconds
@@ -68,10 +67,79 @@ def is_thing_ready(timeout=None):
_DEFAULT_DEADLINE = None
-@six.python_2_unicode_compatible
+class TimeToDeadlineTimeout(object):
+ """A decorator that decreases timeout set for an RPC based on how much time
+ has left till its deadline. The deadline is calculated as
+ ``now + initial_timeout`` when this decorator is first called for an rpc.
+
+ In other words this decorator implements deadline semantics in terms of a
+ sequence of decreasing timeouts t0 > t1 > t2 ... tn >= 0.
+
+ Args:
+ timeout (Optional[float]): the timeout (in seconds) to apply to the
+ wrapped function. If `None`, the target function is expected to
+ never time out.
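+
+ Example (a minimal sketch; ``get_resource`` is a hypothetical function
+ accepting a ``timeout`` keyword argument):
+
+ .. code-block:: python
+
+ wrapped = TimeToDeadlineTimeout(timeout=120.0)(get_resource)
+
+ wrapped() # ~0s after wrapping -> get_resource(timeout=120.0)
+ wrapped() # 30s after wrapping -> get_resource(timeout=90.0)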
+ """
+
+ def __init__(self, timeout=None, clock=datetime_helpers.utcnow):
+ self._timeout = timeout
+ self._clock = clock
+
+ def __call__(self, func):
+ """Apply the timeout decorator.
+
+ Args:
+ func (Callable): The function to apply the timeout argument to.
+ This function must accept a timeout keyword argument.
+
+ Returns:
+ Callable: The wrapped function.
+ """
+
+ first_attempt_timestamp = self._clock().timestamp()
+
+ @functools.wraps(func)
+ def func_with_timeout(*args, **kwargs):
+ """Wrapped function that adds timeout."""
+
+ if self._timeout is not None:
+ # All calculations are in seconds
+ now_timestamp = self._clock().timestamp()
+
+ # Avoid using nonlocal while still producing a round timeout
+ # number for the first attempt (in most cases the only attempt
+ # made for an RPC).
+ if now_timestamp - first_attempt_timestamp < 0.001:
+ now_timestamp = first_attempt_timestamp
+
+ time_since_first_attempt = now_timestamp - first_attempt_timestamp
+ remaining_timeout = self._timeout - time_since_first_attempt
+
+ # Although the `deadline` parameter in `google.api_core.retry.Retry`
+ # is deprecated, and should be treated the same as the `timeout`,
+ # it is still possible for the `deadline` argument in
+ # `google.api_core.retry.Retry` to be larger than the `timeout`.
+ # See https://github.com/googleapis/python-api-core/issues/654
+ # Only positive non-zero timeouts are supported.
+ # Fall back to the initial timeout for negative or 0 timeout values.
+ if remaining_timeout < 1:
+ remaining_timeout = self._timeout
+
+ kwargs["timeout"] = remaining_timeout
+
+ return func(*args, **kwargs)
+
+ return func_with_timeout
+
+ def __str__(self):
+ return "".format(self._timeout)
+
+
class ConstantTimeout(object):
"""A decorator that adds a constant timeout argument.
+ DEPRECATED: use ``TimeToDeadlineTimeout`` instead.
+
This is effectively equivalent to
``functools.partial(func, timeout=timeout)``.
@@ -95,7 +163,7 @@ def __call__(self, func):
Callable: The wrapped function.
"""
- @general_helpers.wraps(func)
+ @functools.wraps(func)
def func_with_timeout(*args, **kwargs):
"""Wrapped function that adds timeout."""
kwargs["timeout"] = self._timeout
@@ -140,10 +208,12 @@ def _exponential_timeout_generator(initial, maximum, multiplier, deadline):
timeout = timeout * multiplier
-@six.python_2_unicode_compatible
class ExponentialTimeout(object):
"""A decorator that adds an exponentially increasing timeout argument.
+ DEPRECATED: the concept of incrementing timeout exponentially has been
+ deprecated. Use ``TimeToDeadlineTimeout`` instead.
+
This is useful if a function is called multiple times. Each time the
function is called this decorator will calculate a new timeout parameter
based on the number of times the function has been called.
@@ -160,9 +230,9 @@ class ExponentialTimeout(object):
deadline (Optional[float]): The overall deadline across all
invocations. This is used to prevent a very large calculated
timeout from pushing the overall execution time over the deadline.
- This is especially useful in conjuction with
+ This is especially useful in conjunction with
:mod:`google.api_core.retry`. If ``None``, the timeouts will not
- be adjusted to accomodate an overall deadline.
+ be adjusted to accommodate an overall deadline.
"""
def __init__(
@@ -178,7 +248,7 @@ def __init__(
self._deadline = deadline
def with_deadline(self, deadline):
- """Return a copy of this teimout with the given deadline.
+ """Return a copy of this timeout with the given deadline.
Args:
deadline (float): The overall deadline across all invocations.
@@ -207,7 +277,7 @@ def __call__(self, func):
self._initial, self._maximum, self._multiplier, self._deadline
)
- @general_helpers.wraps(func)
+ @functools.wraps(func)
def func_with_timeout(*args, **kwargs):
"""Wrapped function that adds timeout."""
kwargs["timeout"] = next(timeouts)
diff --git a/google/api_core/universe.py b/google/api_core/universe.py
new file mode 100644
index 00000000..35669642
--- /dev/null
+++ b/google/api_core/universe.py
@@ -0,0 +1,82 @@
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helpers for universe domain."""
+
+from typing import Any, Optional
+
+DEFAULT_UNIVERSE = "googleapis.com"
+
+
+class EmptyUniverseError(ValueError):
+ def __init__(self):
+ message = "Universe Domain cannot be an empty string."
+ super().__init__(message)
+
+
+class UniverseMismatchError(ValueError):
+ def __init__(self, client_universe, credentials_universe):
+ message = (
+ f"The configured universe domain ({client_universe}) does not match the universe domain "
+ f"found in the credentials ({credentials_universe}). "
+ "If you haven't configured the universe domain explicitly, "
+ f"`{DEFAULT_UNIVERSE}` is the default."
+ )
+ super().__init__(message)
+
+
+def determine_domain(
+ client_universe_domain: Optional[str], universe_domain_env: Optional[str]
+) -> str:
+ """Return the universe domain used by the client.
+
+ Args:
+ client_universe_domain (Optional[str]): The universe domain configured via the client options.
+ universe_domain_env (Optional[str]): The universe domain configured via the
+ "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable.
+
+ Returns:
+ str: The universe domain to be used by the client.
+
+ Raises:
+ ValueError: If the universe domain is an empty string.
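+
+ Example (the client option takes precedence over the environment variable):
+
+ .. code-block:: python
+
+ determine_domain("example.com", None) # -> "example.com"
+ determine_domain(None, "example.com") # -> "example.com"
+ determine_domain(None, None) # -> "googleapis.com"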
+ """
+ universe_domain = DEFAULT_UNIVERSE
+ if client_universe_domain is not None:
+ universe_domain = client_universe_domain
+ elif universe_domain_env is not None:
+ universe_domain = universe_domain_env
+ if len(universe_domain.strip()) == 0:
+ raise EmptyUniverseError
+ return universe_domain
+
+
+def compare_domains(client_universe: str, credentials: Any) -> bool:
+ """Returns True iff the universe domains used by the client and credentials match.
+
+ Args:
+ client_universe (str): The universe domain configured via the client options.
+ credentials (Any): The credentials being used in the client.
+
+ Returns:
+ bool: True iff client_universe matches the universe in credentials.
+
+ Raises:
+ ValueError: when client_universe does not match the universe in credentials.
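+
+ Example (a minimal sketch; ``creds`` stands for any credentials object):
+
+ .. code-block:: python
+
+ # True when creds.universe_domain (default "googleapis.com") matches;
+ # raises UniverseMismatchError otherwise.
+ compare_domains("googleapis.com", creds)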
+ """
+ credentials_universe = getattr(credentials, "universe_domain", DEFAULT_UNIVERSE)
+
+ if client_universe != credentials_universe:
+ raise UniverseMismatchError(client_universe, credentials_universe)
+ return True
diff --git a/google/api_core/version.py b/google/api_core/version.py
new file mode 100644
index 00000000..f882cac3
--- /dev/null
+++ b/google/api_core/version.py
@@ -0,0 +1,15 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+__version__ = "2.25.0"
diff --git a/google/api_core/version_header.py b/google/api_core/version_header.py
new file mode 100644
index 00000000..cf1972ac
--- /dev/null
+++ b/google/api_core/version_header.py
@@ -0,0 +1,29 @@
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+API_VERSION_METADATA_KEY = "x-goog-api-version"
+
+
+def to_api_version_header(version_identifier):
+ """Returns data for the API Version header for the given `version_identifier`.
+
+ Args:
+ version_identifier (str): The version identifier to be used in the
+ tuple returned.
+
+ Returns:
+ Tuple(str, str): A tuple containing the API Version metadata key and
+ value.
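+
+ Example (``"v1_20240229"`` is an arbitrary identifier chosen for
+ illustration):
+
+ .. code-block:: python
+
+ to_api_version_header("v1_20240229")
+ # -> ("x-goog-api-version", "v1_20240229")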
+ """
+ return (API_VERSION_METADATA_KEY, version_identifier)
diff --git a/noxfile.py b/noxfile.py
index 989bb9be..ac21330e 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -14,26 +14,104 @@
from __future__ import absolute_import
import os
+import pathlib
+import re
import shutil
-import sys
# https://github.com/google/importlab/issues/25
import nox # pytype: disable=import-error
-_MINIMAL_ASYNCIO_SUPPORT_PYTHON_VERSION = [3, 6]
+BLACK_VERSION = "black==22.3.0"
+BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"]
+# Black and flake8 clash on the syntax for ignoring flake8's F401 in this file.
+BLACK_EXCLUDES = ["--exclude", "^/google/api_core/operations_v1/__init__.py"]
-def _greater_or_equal_than_36(version_string):
- tokens = version_string.split('.')
- for i, token in enumerate(tokens):
- try:
- tokens[i] = int(token)
- except ValueError:
- pass
- return tokens >= [3, 6]
+PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13", "3.14"]
+DEFAULT_PYTHON_VERSION = "3.10"
+CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()
-def default(session):
+# 'docfx' is excluded since it only needs to run in 'docs-presubmit'
+nox.options.sessions = [
+ "unit",
+ "unit_grpc_gcp",
+ "unit_wo_grpc",
+ "unit_w_prerelease_deps",
+ "unit_w_async_rest_extra",
+ "cover",
+ "pytype",
+ "mypy",
+ "lint",
+ "lint_setup_py",
+ "blacken",
+ "docs",
+]
+
+# Error if a python version is missing
+nox.options.error_on_missing_interpreters = True
+
+
+@nox.session(python=DEFAULT_PYTHON_VERSION)
+def lint(session):
+ """Run linters.
+
+ Returns a failure if the linters find linting errors or sufficiently
+ serious code quality issues.
+ """
+ session.install("flake8", BLACK_VERSION)
+ session.install(".")
+ session.run(
+ "black",
+ "--check",
+ *BLACK_EXCLUDES,
+ *BLACK_PATHS,
+ )
+ session.run("flake8", "google", "tests")
+
+
+@nox.session(python=DEFAULT_PYTHON_VERSION)
+def blacken(session):
+ """Run black.
+
+ Format code to uniform standard.
+ """
+ session.install(BLACK_VERSION)
+ session.run("black", *BLACK_EXCLUDES, *BLACK_PATHS)
+
+
+def install_prerelease_dependencies(session, constraints_path):
+ with open(constraints_path, encoding="utf-8") as constraints_file:
+ constraints_text = constraints_file.read()
+ # Extract the package name from each "pkg==version" pin,
+ # ignoring leading whitespace and comment lines.
+ constraints_deps = [
+ match.group(1)
+ for match in re.finditer(
+ r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE
+ )
+ ]
+ session.install(*constraints_deps)
+ prerel_deps = [
+ "google-auth",
+ "googleapis-common-protos",
+ "grpcio",
+ "grpcio-status",
+ "proto-plus",
+ "protobuf",
+ ]
+
+ for dep in prerel_deps:
+ session.install("--pre", "--no-deps", "--upgrade", dep)
+
+ # Remaining dependencies
+ other_deps = [
+ "requests",
+ ]
+ session.install(*other_deps)
+
+
+def default(session, install_grpc=True, prerelease=False, install_async_rest=False):
"""Default unit test session.
This is intended to be run **without** an interpreter set, so
@@ -41,66 +119,144 @@ def default(session):
Python corresponding to the ``nox`` binary the ``PATH`` can
run the tests.
"""
- # Install all test dependencies, then install this package in-place.
- session.install("mock", "pytest", "pytest-cov", "grpcio >= 1.0.2")
- session.install("-e", ".")
+ if prerelease and not install_grpc:
+ session.skip("The pre-release session cannot be run without grpc")
+
+ session.install(
+ "dataclasses",
+ "mock; python_version=='3.7'",
+ "pytest",
+ "pytest-cov",
+ "pytest-xdist",
+ )
+
+ install_extras = []
+ if install_grpc:
+ # Note: The extra is called `grpc` and not `grpcio`.
+ install_extras.append("grpc")
+
+ constraints_dir = str(CURRENT_DIRECTORY / "testing")
+ if install_async_rest:
+ install_extras.append("async_rest")
+ constraints_type = "async-rest-"
+ else:
+ constraints_type = ""
+
+ lib_with_extras = f".[{','.join(install_extras)}]" if len(install_extras) else "."
+ if prerelease:
+ install_prerelease_dependencies(
+ session,
+ f"{constraints_dir}/constraints-{constraints_type}{PYTHON_VERSIONS[0]}.txt",
+ )
+ # This *must* be the last install command to get the package from source.
+ session.install("-e", lib_with_extras, "--no-deps")
+ else:
+ constraints_file = (
+ f"{constraints_dir}/constraints-{constraints_type}{session.python}.txt"
+ )
+ # fall back to standard constraints file
+ if not pathlib.Path(constraints_file).exists():
+ constraints_file = f"{constraints_dir}/constraints-{session.python}.txt"
+
+ session.install(
+ "-e",
+ lib_with_extras,
+ "-c",
+ constraints_file,
+ )
+
+ # Print out package versions of dependencies
+ session.run(
+ "python", "-c", "import google.protobuf; print(google.protobuf.__version__)"
+ )
+ # Support for proto.version was added in v1.23.0
+ # https://github.com/googleapis/proto-plus-python/releases/tag/v1.23.0
+ session.run(
+ "python",
+ "-c",
+ """import proto; hasattr(proto, "version") and print(proto.version.__version__)""",
+ )
+ if install_grpc:
+ session.run("python", "-c", "import grpc; print(grpc.__version__)")
+ session.run("python", "-c", "import google.auth; print(google.auth.__version__)")
pytest_args = [
"python",
"-m",
- "py.test",
- "--quiet",
- "--cov=google.api_core",
- "--cov=tests.unit",
- "--cov-append",
- "--cov-config=.coveragerc",
- "--cov-report=",
- "--cov-fail-under=0",
- os.path.join("tests", "unit"),
+ "pytest",
+ *(
+ # Helpful for running a single test or testfile.
+ session.posargs
+ or [
+ "--quiet",
+ "--cov=google.api_core",
+ "--cov=tests.unit",
+ "--cov-append",
+ "--cov-config=.coveragerc",
+ "--cov-report=",
+ "--cov-fail-under=0",
+ # Running individual tests with parallelism enabled is usually not helpful.
+ "-n=auto",
+ os.path.join("tests", "unit"),
+ ]
+ ),
]
- pytest_args.extend(session.posargs)
- # Inject AsyncIO content, if version >= 3.6.
- if _greater_or_equal_than_36(session.python):
- session.install("asyncmock", "pytest-asyncio")
+ session.install("asyncmock", "pytest-asyncio")
+ # Having positional arguments means the user wants to run specific tests.
+ # Best not to add additional tests to that list.
+ if not session.posargs:
pytest_args.append("--cov=tests.asyncio")
pytest_args.append(os.path.join("tests", "asyncio"))
- session.run(*pytest_args)
- else:
- # Run py.test against the unit tests.
- session.run(*pytest_args)
+ session.run(*pytest_args)
-@nox.session(python=["2.7", "3.5", "3.6", "3.7", "3.8"])
+
+@nox.session(python=PYTHON_VERSIONS)
def unit(session):
"""Run the unit test suite."""
default(session)
-@nox.session(python=["2.7", "3.5", "3.6", "3.7", "3.8"])
-def unit_grpc_gcp(session):
- """Run the unit test suite with grpcio-gcp installed."""
+@nox.session(python=PYTHON_VERSIONS)
+def unit_w_prerelease_deps(session):
+ """Run the unit test suite."""
+ default(session, prerelease=True)
+
+@nox.session(python=PYTHON_VERSIONS)
+def unit_grpc_gcp(session):
+ """
+ Run the unit test suite with grpcio-gcp installed.
+ `grpcio-gcp` doesn't support protobuf 4+.
+ Remove extra `grpcgcp` when protobuf 3.x is dropped.
+ https://github.com/googleapis/python-api-core/issues/594
+ """
+ constraints_path = str(
+ CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt"
+ )
# Install grpcio-gcp
- session.install("grpcio-gcp")
+ session.install("-e", ".[grpcgcp]", "-c", constraints_path)
+ # Install protobuf < 4.0.0
+ session.install("protobuf<4.0.0")
default(session)
-@nox.session(python="3.6")
-def lint(session):
- """Run linters.
+@nox.session(python=PYTHON_VERSIONS)
+def unit_wo_grpc(session):
+ """Run the unit test suite w/o grpcio installed"""
+ default(session, install_grpc=False)
- Returns a failure if the linters find linting errors or sufficiently
- serious code quality issues.
- """
- session.install("flake8", "flake8-import-order")
- session.install(".")
- session.run("flake8", "google", "tests")
+
+@nox.session(python=PYTHON_VERSIONS)
+def unit_w_async_rest_extra(session):
+ """Run the unit test suite with the `async_rest` extra"""
+ default(session, install_async_rest=True)
-@nox.session(python="3.6")
+@nox.session(python=DEFAULT_PYTHON_VERSION)
def lint_setup_py(session):
"""Verify that setup.py is valid (including RST check)."""
@@ -108,18 +264,28 @@ def lint_setup_py(session):
session.run("python", "setup.py", "check", "--restructuredtext", "--strict")
-# No 2.7 due to https://github.com/google/importlab/issues/26.
-# No 3.7 because pytype supports up to 3.6 only.
-@nox.session(python="3.6")
+@nox.session(python=DEFAULT_PYTHON_VERSION)
def pytype(session):
"""Run type-checking."""
+ session.install(".[grpc]", "pytype")
+ session.run("pytype")
+
+
+@nox.session(python=DEFAULT_PYTHON_VERSION)
+def mypy(session):
+ """Run type-checking."""
+ session.install(".[grpc,async_rest]", "mypy")
session.install(
- ".", "grpcio >= 1.8.2", "grpcio-gcp >= 0.2.2", "pytype >= 2019.3.21"
+ "types-setuptools",
+ "types-requests",
+ "types-protobuf",
+ "types-dataclasses",
+ "types-mock; python_version=='3.7'",
)
- session.run("pytype")
+ session.run("mypy", "google", "tests")
-@nox.session(python="3.6")
+@nox.session(python=DEFAULT_PYTHON_VERSION)
def cover(session):
"""Run the final coverage report.
@@ -131,13 +297,25 @@ def cover(session):
session.run("coverage", "erase")
-@nox.session(python="3.7")
+@nox.session(python="3.10")
def docs(session):
"""Build the docs for this library."""
- session.install(".", "grpcio >= 1.8.2", "grpcio-gcp >= 0.2.2")
- session.install("-e", ".")
- session.install("sphinx < 3.0", "alabaster", "recommonmark")
+ session.install("-e", ".[grpc]")
+ session.install(
+ # We need to pin to specific versions of the `sphinxcontrib-*` packages
+ # which still support sphinx 4.x.
+ # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344
+ # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345.
+ "sphinxcontrib-applehelp==1.0.4",
+ "sphinxcontrib-devhelp==1.0.2",
+ "sphinxcontrib-htmlhelp==2.0.1",
+ "sphinxcontrib-qthelp==1.0.3",
+ "sphinxcontrib-serializinghtml==1.1.5",
+ "sphinx==4.5.0",
+ "alabaster",
+ "recommonmark",
+ )
shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True)
session.run(
@@ -152,3 +330,49 @@ def docs(session):
os.path.join("docs", ""),
os.path.join("docs", "_build", "html", ""),
)
+
+
+@nox.session(python="3.10")
+def docfx(session):
+ """Build the docfx yaml files for this library."""
+
+ session.install("-e", ".")
+ session.install(
+ # We need to pin to specific versions of the `sphinxcontrib-*` packages
+ # which still support sphinx 4.x.
+ # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344
+ # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345.
+ "sphinxcontrib-applehelp==1.0.4",
+ "sphinxcontrib-devhelp==1.0.2",
+ "sphinxcontrib-htmlhelp==2.0.1",
+ "sphinxcontrib-qthelp==1.0.3",
+ "sphinxcontrib-serializinghtml==1.1.5",
+ "gcp-sphinx-docfx-yaml",
+ "alabaster",
+ "recommonmark",
+ )
+
+ shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True)
+ session.run(
+ "sphinx-build",
+ "-T", # show full traceback on exception
+ "-N", # no colors
+ "-D",
+ (
+ "extensions=sphinx.ext.autodoc,"
+ "sphinx.ext.autosummary,"
+ "docfx_yaml.extension,"
+ "sphinx.ext.intersphinx,"
+ "sphinx.ext.coverage,"
+ "sphinx.ext.napoleon,"
+ "sphinx.ext.todo,"
+ "sphinx.ext.viewcode,"
+ "recommonmark"
+ ),
+ "-b",
+ "html",
+ "-d",
+ os.path.join("docs", "_build", "doctrees", ""),
+ os.path.join("docs", ""),
+ os.path.join("docs", "_build", "html", ""),
+ )
diff --git a/synth.py b/owlbot.py
similarity index 62%
rename from synth.py
rename to owlbot.py
index 1e8abc95..58bc7517 100644
--- a/synth.py
+++ b/owlbot.py
@@ -14,15 +14,27 @@
"""This script is used to synthesize generated parts of this library."""
-import re
-
import synthtool as s
from synthtool import gcp
+from synthtool.languages import python
common = gcp.CommonTemplates()
# ----------------------------------------------------------------------------
# Add templated files
# ----------------------------------------------------------------------------
-templated_files = common.py_library(cov_level=100)
-s.move(templated_files, excludes=["noxfile.py", ".flake8", ".coveragerc", "setup.cfg"])
\ No newline at end of file
+excludes = [
+ "noxfile.py", # pytype
+ "setup.cfg", # pytype
+ ".coveragerc", # layout
+ "CONTRIBUTING.rst", # no systests
+ ".github/workflows/unittest.yml", # exclude unittest gh action
+ ".github/workflows/lint.yml", # exclude lint gh action
+ "README.rst",
+]
+templated_files = common.py_library(microgenerator=True, cov_level=100)
+s.move(templated_files, excludes=excludes)
+
+python.configure_previous_major_version_branches()
+
+s.shell.run(["nox", "-s", "blacken"], hide_output=False)
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 00000000..da404ab3
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,107 @@
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+[build-system]
+requires = ["setuptools"]
+build-backend = "setuptools.build_meta"
+
+[project]
+name = "google-api-core"
+authors = [{ name = "Google LLC", email = "googleapis-packages@google.com" }]
+license = { text = "Apache 2.0" }
+requires-python = ">=3.7"
+readme = "README.rst"
+description = "Google API client core library"
+classifiers = [
+ # Should be one of:
+ # "Development Status :: 3 - Alpha"
+ # "Development Status :: 4 - Beta"
+ # "Development Status :: 5 - Production/Stable"
+ "Development Status :: 5 - Production/Stable",
+ "Intended Audience :: Developers",
+ "License :: OSI Approved :: Apache Software License",
+ "Programming Language :: Python",
+ "Programming Language :: Python :: 3",
+ "Programming Language :: Python :: 3.7",
+ "Programming Language :: Python :: 3.8",
+ "Programming Language :: Python :: 3.9",
+ "Programming Language :: Python :: 3.10",
+ "Programming Language :: Python :: 3.11",
+ "Programming Language :: Python :: 3.12",
+ "Programming Language :: Python :: 3.13",
+ "Operating System :: OS Independent",
+ "Topic :: Internet",
+]
+dependencies = [
+ "googleapis-common-protos >= 1.56.2, < 2.0.0",
+ "protobuf >= 3.19.5, < 7.0.0, != 3.20.0, != 3.20.1, != 4.21.0, != 4.21.1, != 4.21.2, != 4.21.3, != 4.21.4, != 4.21.5",
+ "proto-plus >= 1.22.3, < 2.0.0",
+ "proto-plus >= 1.25.0, < 2.0.0; python_version >= '3.13'",
+ "google-auth >= 2.14.1, < 3.0.0",
+ "requests >= 2.18.0, < 3.0.0",
+]
+dynamic = ["version"]
+
+[project.urls]
+Documentation = "https://googleapis.dev/python/google-api-core/latest/"
+Repository = "https://github.com/googleapis/python-api-core"
+
+[project.optional-dependencies]
+async_rest = ["google-auth[aiohttp] >= 2.35.0, < 3.0.0"]
+grpc = [
+ "grpcio >= 1.33.2, < 2.0.0",
+ "grpcio >= 1.49.1, < 2.0.0; python_version >= '3.11'",
+ "grpcio-status >= 1.33.2, < 2.0.0",
+ "grpcio-status >= 1.49.1, < 2.0.0; python_version >= '3.11'",
+]
+grpcgcp = ["grpcio-gcp >= 0.2.2, < 1.0.0"]
+grpcio-gcp = ["grpcio-gcp >= 0.2.2, < 1.0.0"]
+
+[tool.setuptools.dynamic]
+version = { attr = "google.api_core.version.__version__" }
+
+[tool.setuptools.packages.find]
+# Only include packages under the 'google' namespace. Do not include tests,
+# benchmarks, etc.
+include = ["google*"]
+
+[tool.mypy]
+python_version = "3.7"
+namespace_packages = true
+ignore_missing_imports = true
+
+[tool.pytest]
+filterwarnings = [
+ # treat all warnings as errors
+ "error",
+ # Remove once https://github.com/pytest-dev/pytest-cov/issues/621 is fixed
+ "ignore:.*The --rsyncdir command line argument and rsyncdirs config variable are deprecated:DeprecationWarning",
+ # Remove once https://github.com/protocolbuffers/protobuf/issues/12186 is fixed
+ "ignore:.*custom tp_new.*in Python 3.14:DeprecationWarning",
+ # Remove once support for python 3.7 is dropped
+ # This warning only appears when using python 3.7
+ "ignore:.*Using or importing the ABCs from.*collections:DeprecationWarning",
+ # Remove once support for grpcio-gcp is deprecated
+ # See https://github.com/googleapis/python-api-core/blob/42e8b6e6f426cab749b34906529e8aaf3f133d75/google/api_core/grpc_helpers.py#L39-L45
+ "ignore:.*Support for grpcio-gcp is deprecated:DeprecationWarning",
+ "ignore: The `compression` argument is ignored for grpc_gcp.secure_channel creation:DeprecationWarning",
+ "ignore:The `attempt_direct_path` argument is ignored for grpc_gcp.secure_channel creation:DeprecationWarning",
+ # Remove once the minimum supported version of googleapis-common-protos is 1.62.0
+ "ignore:.*pkg_resources.declare_namespace:DeprecationWarning",
+ "ignore:.*pkg_resources is deprecated as an API:DeprecationWarning",
+ # Remove once https://github.com/grpc/grpc/issues/35086 is fixed (and version newer than 1.60.0 is published)
+ "ignore:There is no current event loop:DeprecationWarning",
+ # Remove after support for Python 3.7 is dropped
+ "ignore:After January 1, 2024, new releases of this library will drop support for Python 3.7:DeprecationWarning",
+]
diff --git a/renovate.json b/renovate.json
index 4fa94931..c7875c46 100644
--- a/renovate.json
+++ b/renovate.json
@@ -1,5 +1,12 @@
{
"extends": [
- "config:base", ":preserveSemverRanges"
- ]
+ "config:base",
+ "group:all",
+ ":preserveSemverRanges",
+ ":disableDependencyDashboard"
+ ],
+ "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py", ".github/workflows/unittest.yml"],
+ "pip_requirements": {
+ "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"]
+ }
}
diff --git a/scripts/decrypt-secrets.sh b/scripts/decrypt-secrets.sh
index ff599eb2..120b0ddc 100755
--- a/scripts/decrypt-secrets.sh
+++ b/scripts/decrypt-secrets.sh
@@ -1,6 +1,6 @@
#!/bin/bash
-# Copyright 2015 Google Inc. All rights reserved.
+# Copyright 2024 Google LLC All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -20,14 +20,27 @@ ROOT=$( dirname "$DIR" )
# Work from the project root.
cd $ROOT
+# Prevent this script from overwriting existing files.
+# We recommend that sample authors use their own service account files and cloud project.
+# In that case, they are expected to prepare these files themselves.
+if [[ -f "testing/test-env.sh" ]] || \
+ [[ -f "testing/service-account.json" ]] || \
+ [[ -f "testing/client-secrets.json" ]]; then
+ echo "One or more target files exist, aborting."
+ exit 1
+fi
+
# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources.
PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}"
gcloud secrets versions access latest --secret="python-docs-samples-test-env" \
+ --project="${PROJECT_ID}" \
> testing/test-env.sh
gcloud secrets versions access latest \
--secret="python-docs-samples-service-account" \
+ --project="${PROJECT_ID}" \
> testing/service-account.json
gcloud secrets versions access latest \
--secret="python-docs-samples-client-secrets" \
- > testing/client-secrets.json
\ No newline at end of file
+ --project="${PROJECT_ID}" \
+ > testing/client-secrets.json
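
The new guard makes the script fail fast instead of clobbering credentials that sample authors prepared themselves, and `--project` pins every `gcloud secrets` call to the resolved project. For illustration only, the same guard sketched in Python (paths mirror the script; nothing here is library API):

```python
# Illustrative Python equivalent of the overwrite guard added above.
import sys
from pathlib import Path

TARGETS = [
    Path("testing/test-env.sh"),
    Path("testing/service-account.json"),
    Path("testing/client-secrets.json"),
]

if any(path.exists() for path in TARGETS):
    sys.exit("One or more target files exist, aborting.")
```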
diff --git a/scripts/readme-gen/readme_gen.py b/scripts/readme-gen/readme_gen.py
index d309d6e9..8f5e248a 100644
--- a/scripts/readme-gen/readme_gen.py
+++ b/scripts/readme-gen/readme_gen.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python
-# Copyright 2016 Google Inc
+# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -28,19 +28,22 @@
jinja_env = jinja2.Environment(
trim_blocks=True,
loader=jinja2.FileSystemLoader(
- os.path.abspath(os.path.join(os.path.dirname(__file__), 'templates'))))
+ os.path.abspath(os.path.join(os.path.dirname(__file__), "templates"))
+ ),
+ autoescape=True,
+)
-README_TMPL = jinja_env.get_template('README.tmpl.rst')
+README_TMPL = jinja_env.get_template("README.tmpl.rst")
def get_help(file):
- return subprocess.check_output(['python', file, '--help']).decode()
+ return subprocess.check_output(["python", file, "--help"]).decode()
def main():
parser = argparse.ArgumentParser()
- parser.add_argument('source')
- parser.add_argument('--destination', default='README.rst')
+ parser.add_argument("source")
+ parser.add_argument("--destination", default="README.rst")
args = parser.parse_args()
@@ -48,9 +51,9 @@ def main():
root = os.path.dirname(source)
destination = os.path.join(root, args.destination)
- jinja_env.globals['get_help'] = get_help
+ jinja_env.globals["get_help"] = get_help
- with io.open(source, 'r') as f:
+ with io.open(source, "r") as f:
config = yaml.load(f)
# This allows get_help to execute in the right directory.
@@ -58,9 +61,9 @@ def main():
output = README_TMPL.render(config)
- with io.open(destination, 'w') as f:
+ with io.open(destination, "w") as f:
f.write(output)
-if __name__ == '__main__':
+if __name__ == "__main__":
main()
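
Besides the quote-style cleanup, the functional change here is `autoescape=True`, which makes Jinja2 escape rendered variables by default. A self-contained sketch of the effect:

```python
# Minimal demonstration of the Jinja2 autoescaping enabled above.
import jinja2

env = jinja2.Environment(autoescape=True)
template = env.from_string("Hello {{ name }}!")
print(template.render(name="<b>world</b>"))  # Hello &lt;b&gt;world&lt;/b&gt;!
```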
diff --git a/scripts/readme-gen/templates/install_deps.tmpl.rst b/scripts/readme-gen/templates/install_deps.tmpl.rst
index a0406dba..6f069c6c 100644
--- a/scripts/readme-gen/templates/install_deps.tmpl.rst
+++ b/scripts/readme-gen/templates/install_deps.tmpl.rst
@@ -12,7 +12,7 @@ Install Dependencies
.. _Python Development Environment Setup Guide:
https://cloud.google.com/python/setup
-#. Create a virtualenv. Samples are compatible with Python 2.7 and 3.4+.
+#. Create a virtualenv. Samples are compatible with Python 3.7+.
.. code-block:: bash
diff --git a/setup.cfg b/setup.cfg
index 5c32e166..f7b5a3bc 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,12 +1,9 @@
-[bdist_wheel]
-universal = 1
-
[pytype]
-python_version = 3.6
+python_version = 3.7
inputs =
google/
exclude =
tests/
-output = pytype_output/
+output = .pytype/
# Workaround for https://github.com/google/pytype/issues/150
disable = pyi-error
diff --git a/setup.py b/setup.py
index 6fa56b3c..168877fa 100644
--- a/setup.py
+++ b/setup.py
@@ -12,88 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import io
-import os
-
import setuptools
-# Package metadata.
-
-name = "google-api-core"
-description = "Google API client core library"
-version = "1.22.1"
-# Should be one of:
-# 'Development Status :: 3 - Alpha'
-# 'Development Status :: 4 - Beta'
-# 'Development Status :: 5 - Production/Stable'
-release_status = "Development Status :: 5 - Production/Stable"
-dependencies = [
- "googleapis-common-protos >= 1.6.0, < 2.0dev",
- "protobuf >= 3.12.0",
- "google-auth >= 1.19.1, < 2.0dev",
- "requests >= 2.18.0, < 3.0.0dev",
- "setuptools >= 34.0.0",
- "six >= 1.10.0",
- "pytz",
- 'futures >= 3.2.0; python_version < "3.2"',
-]
-extras = {
- "grpc": "grpcio >= 1.29.0, < 2.0dev",
- "grpcgcp": "grpcio-gcp >= 0.2.2",
- "grpcio-gcp": "grpcio-gcp >= 0.2.2",
-}
-
-
-# Setup boilerplate below this line.
-
-package_root = os.path.abspath(os.path.dirname(__file__))
-
-readme_filename = os.path.join(package_root, "README.rst")
-with io.open(readme_filename, encoding="utf-8") as readme_file:
- readme = readme_file.read()
-
-# Only include packages under the 'google' namespace. Do not include tests,
-# benchmarks, etc.
-packages = [
- package for package in setuptools.find_packages() if package.startswith("google")
-]
-
-# Determine which namespaces are needed.
-namespaces = ["google"]
-if "google.cloud" in packages:
- namespaces.append("google.cloud")
-
-
-setuptools.setup(
- name=name,
- version=version,
- description=description,
- long_description=readme,
- author="Google LLC",
- author_email="googleapis-packages@google.com",
- license="Apache 2.0",
- url="https://github.com/googleapis/python-api-core",
- classifiers=[
- release_status,
- "Intended Audience :: Developers",
- "License :: OSI Approved :: Apache Software License",
- "Programming Language :: Python",
- "Programming Language :: Python :: 2",
- "Programming Language :: Python :: 2.7",
- "Programming Language :: Python :: 3",
- "Programming Language :: Python :: 3.5",
- "Programming Language :: Python :: 3.6",
- "Programming Language :: Python :: 3.7",
- "Operating System :: OS Independent",
- "Topic :: Internet",
- ],
- platforms="Posix; MacOS X; Windows",
- packages=packages,
- namespace_packages=namespaces,
- install_requires=dependencies,
- extras_require=extras,
- python_requires=">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*",
- include_package_data=True,
- zip_safe=False,
-)
+setuptools.setup()
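
With the metadata now declared in pyproject.toml, setup.py collapses to a bare `setuptools.setup()` shim. The `[tool.setuptools.dynamic]` table shown earlier tells setuptools to resolve the version from `google.api_core.version.__version__` at build time; roughly this lookup, sketched under the assumption that the package is importable:

```python
# Rough sketch of the dynamic-version lookup configured in pyproject.toml.
import importlib

module_name, attr = "google.api_core.version", "__version__"
version = getattr(importlib.import_module(module_name), attr)
print(version)
```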
diff --git a/synth.metadata b/synth.metadata
deleted file mode 100644
index 95ac3553..00000000
--- a/synth.metadata
+++ /dev/null
@@ -1,18 +0,0 @@
-{
- "sources": [
- {
- "git": {
- "name": ".",
- "remote": "https://github.com/googleapis/python-api-core.git",
- "sha": "c890675dc9ebc084f105be81dc81c048f4f599ea"
- }
- },
- {
- "git": {
- "name": "synthtool",
- "remote": "https://github.com/googleapis/synthtool.git",
- "sha": "303271797a360f8a439203413f13a160f2f5b3b4"
- }
- }
- ]
-}
\ No newline at end of file
diff --git a/testing/constraints-3.10.txt b/testing/constraints-3.10.txt
new file mode 100644
index 00000000..e69de29b
diff --git a/testing/constraints-3.11.txt b/testing/constraints-3.11.txt
new file mode 100644
index 00000000..e69de29b
diff --git a/testing/constraints-3.12.txt b/testing/constraints-3.12.txt
new file mode 100644
index 00000000..e69de29b
diff --git a/testing/constraints-3.13.txt b/testing/constraints-3.13.txt
new file mode 100644
index 00000000..e69de29b
diff --git a/testing/constraints-3.14.txt b/testing/constraints-3.14.txt
new file mode 100644
index 00000000..e69de29b
diff --git a/testing/constraints-3.7.txt b/testing/constraints-3.7.txt
new file mode 100644
index 00000000..4ce1c899
--- /dev/null
+++ b/testing/constraints-3.7.txt
@@ -0,0 +1,15 @@
+# This constraints file is used to check that lower bounds
+# are correct in setup.py
+# List *all* library dependencies and extras in this file.
+# Pin the version to the lower bound.
+#
+# e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev",
+# then this file should have foo==1.14.0
+googleapis-common-protos==1.56.2
+protobuf==3.19.5
+google-auth==2.14.1
+requests==2.18.0
+grpcio==1.33.2
+grpcio-status==1.33.2
+grpcio-gcp==0.2.2
+proto-plus==1.22.3
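
Each constraints file pins every dependency to the declared lower bound so CI catches understated minimums. A hedged helper sketch (the function name is illustrative) deriving such pins with the `packaging` library:

```python
# Illustrative helper: turn a requirement's ">=" bound into the "==" pin
# format used by the constraints files above.
from packaging.requirements import Requirement

def lower_bound_pin(req_str: str) -> str:
    req = Requirement(req_str)
    lower = next(spec.version for spec in req.specifier if spec.operator == ">=")
    return f"{req.name}=={lower}"

print(lower_bound_pin("googleapis-common-protos >= 1.56.2, < 2.0.0"))
# -> googleapis-common-protos==1.56.2
```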
diff --git a/testing/constraints-3.8.txt b/testing/constraints-3.8.txt
new file mode 100644
index 00000000..1b5bb58e
--- /dev/null
+++ b/testing/constraints-3.8.txt
@@ -0,0 +1,2 @@
+googleapis-common-protos==1.56.3
+protobuf==4.21.6
\ No newline at end of file
diff --git a/testing/constraints-3.9.txt b/testing/constraints-3.9.txt
new file mode 100644
index 00000000..e69de29b
diff --git a/testing/constraints-async-rest-3.7.txt b/testing/constraints-async-rest-3.7.txt
new file mode 100644
index 00000000..7aedeb1c
--- /dev/null
+++ b/testing/constraints-async-rest-3.7.txt
@@ -0,0 +1,17 @@
+# This constraints file is used to check that lower bounds
+# are correct in setup.py
+# List *all* library dependencies and extras in this file.
+# Pin the version to the lower bound.
+#
+# e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev",
+# then this file should have foo==1.14.0
+googleapis-common-protos==1.56.2
+protobuf==3.19.5
+google-auth==2.35.0
+# from google-auth[aiohttp]
+aiohttp==3.6.2
+requests==2.20.0
+grpcio==1.33.2
+grpcio-status==1.33.2
+grpcio-gcp==0.2.2
+proto-plus==1.22.3
diff --git a/tests/asyncio/future/test_async_future.py b/tests/asyncio/future/test_async_future.py
index 3322cb05..659f41cf 100644
--- a/tests/asyncio/future/test_async_future.py
+++ b/tests/asyncio/future/test_async_future.py
@@ -13,8 +13,8 @@
# limitations under the License.
import asyncio
+from unittest import mock
-import mock
import pytest
from google.api_core import exceptions
@@ -47,7 +47,6 @@ async def test_polling_future_constructor():
@pytest.mark.asyncio
async def test_set_result():
future = AsyncFuture()
- callback = mock.Mock()
future.set_result(1)
@@ -125,7 +124,6 @@ async def test_result_with_polling():
class AsyncFutureTimeout(AsyncFutureWithPoll):
-
async def done(self):
await asyncio.sleep(0.2)
return False
diff --git a/tests/asyncio/gapic/test_config_async.py b/tests/asyncio/gapic/test_config_async.py
index 1f6ea9e2..dbb05d5e 100644
--- a/tests/asyncio/gapic/test_config_async.py
+++ b/tests/asyncio/gapic/test_config_async.py
@@ -12,6 +12,14 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+
+import pytest
+
+try:
+ import grpc # noqa: F401
+except ImportError:
+ pytest.skip("No GRPC", allow_module_level=True)
+
from google.api_core import exceptions
from google.api_core.gapic_v1 import config_async
diff --git a/tests/asyncio/gapic/test_method_async.py b/tests/asyncio/gapic/test_method_async.py
index 7318362b..cc4e7de8 100644
--- a/tests/asyncio/gapic/test_method_async.py
+++ b/tests/asyncio/gapic/test_method_async.py
@@ -14,12 +14,23 @@
import datetime
-from grpc.experimental import aio
-import mock
+try:
+ from unittest import mock
+ from unittest.mock import AsyncMock # pragma: NO COVER # noqa: F401
+except ImportError: # pragma: NO COVER
+ import mock # type: ignore
import pytest
-from google.api_core import (exceptions, gapic_v1, grpc_helpers_async,
- retry_async, timeout)
+try:
+ from grpc import aio, Compression
+except ImportError:
+ pytest.skip("No GRPC", allow_module_level=True)
+
+from google.api_core import exceptions
+from google.api_core import gapic_v1
+from google.api_core import grpc_helpers_async
+from google.api_core import retry_async
+from google.api_core import timeout
def _utcnow_monotonic():
@@ -55,9 +66,7 @@ async def test_wrap_method_with_no_client_info():
fake_call = grpc_helpers_async.FakeUnaryUnaryCall()
method = mock.Mock(spec=aio.UnaryUnaryMultiCallable, return_value=fake_call)
- wrapped_method = gapic_v1.method_async.wrap_method(
- method, client_info=None
- )
+ wrapped_method = gapic_v1.method_async.wrap_method(method, client_info=None)
await wrapped_method(1, 2, meep="moop")
@@ -72,13 +81,12 @@ async def test_wrap_method_with_custom_client_info():
api_core_version=3,
gapic_version=4,
client_library_version=5,
+ protobuf_runtime_version=6,
)
fake_call = grpc_helpers_async.FakeUnaryUnaryCall()
method = mock.Mock(spec=aio.UnaryUnaryMultiCallable, return_value=fake_call)
- wrapped_method = gapic_v1.method_async.wrap_method(
- method, client_info=client_info
- )
+ wrapped_method = gapic_v1.method_async.wrap_method(method, client_info=client_info)
await wrapped_method(1, 2, meep="moop")
@@ -89,6 +97,35 @@ async def test_wrap_method_with_custom_client_info():
assert client_info.to_grpc_metadata() in metadata
+@pytest.mark.asyncio
+async def test_wrap_method_with_no_compression():
+ fake_call = grpc_helpers_async.FakeUnaryUnaryCall()
+ method = mock.Mock(spec=aio.UnaryUnaryMultiCallable, return_value=fake_call)
+
+ wrapped_method = gapic_v1.method_async.wrap_method(method)
+
+ await wrapped_method(1, 2, meep="moop", compression=None)
+
+ method.assert_called_once_with(1, 2, meep="moop", metadata=mock.ANY)
+
+
+@pytest.mark.asyncio
+async def test_wrap_method_with_custom_compression():
+ compression = Compression.Gzip
+ fake_call = grpc_helpers_async.FakeUnaryUnaryCall()
+ method = mock.Mock(spec=aio.UnaryUnaryMultiCallable, return_value=fake_call)
+
+ wrapped_method = gapic_v1.method_async.wrap_method(
+ method, default_compression=compression
+ )
+
+ await wrapped_method(1, 2, meep="moop", compression=Compression.Deflate)
+
+ method.assert_called_once_with(
+ 1, 2, meep="moop", metadata=mock.ANY, compression=Compression.Deflate
+ )
+
+
@pytest.mark.asyncio
async def test_invoke_wrapped_method_with_metadata():
fake_call = grpc_helpers_async.FakeUnaryUnaryCall()
@@ -123,107 +160,86 @@ async def test_invoke_wrapped_method_with_metadata_as_none():
@mock.patch("asyncio.sleep")
@pytest.mark.asyncio
-async def test_wrap_method_with_default_retry_and_timeout(unused_sleep):
+async def test_wrap_method_with_default_retry_timeout_and_compression(unused_sleep):
fake_call = grpc_helpers_async.FakeUnaryUnaryCall(42)
- method = mock.Mock(spec=aio.UnaryUnaryMultiCallable, side_effect=[
- exceptions.InternalServerError(None),
- fake_call,
- ])
+ method = mock.Mock(
+ spec=aio.UnaryUnaryMultiCallable,
+ side_effect=[exceptions.InternalServerError(None), fake_call],
+ )
default_retry = retry_async.AsyncRetry()
default_timeout = timeout.ConstantTimeout(60)
+ default_compression = Compression.Gzip
wrapped_method = gapic_v1.method_async.wrap_method(
- method, default_retry, default_timeout
+ method, default_retry, default_timeout, default_compression
)
result = await wrapped_method()
assert result == 42
assert method.call_count == 2
- method.assert_called_with(timeout=60, metadata=mock.ANY)
+ method.assert_called_with(
+ timeout=60, compression=default_compression, metadata=mock.ANY
+ )
@mock.patch("asyncio.sleep")
@pytest.mark.asyncio
async def test_wrap_method_with_default_retry_and_timeout_using_sentinel(unused_sleep):
fake_call = grpc_helpers_async.FakeUnaryUnaryCall(42)
- method = mock.Mock(spec=aio.UnaryUnaryMultiCallable, side_effect=[
- exceptions.InternalServerError(None),
- fake_call,
- ])
+ method = mock.Mock(
+ spec=aio.UnaryUnaryMultiCallable,
+ side_effect=[exceptions.InternalServerError(None), fake_call],
+ )
default_retry = retry_async.AsyncRetry()
default_timeout = timeout.ConstantTimeout(60)
+ default_compression = Compression.Gzip
wrapped_method = gapic_v1.method_async.wrap_method(
- method, default_retry, default_timeout
+ method, default_retry, default_timeout, default_compression
)
result = await wrapped_method(
retry=gapic_v1.method_async.DEFAULT,
timeout=gapic_v1.method_async.DEFAULT,
+ compression=gapic_v1.method_async.DEFAULT,
)
assert result == 42
assert method.call_count == 2
- method.assert_called_with(timeout=60, metadata=mock.ANY)
+ method.assert_called_with(
+ timeout=60, compression=Compression.Gzip, metadata=mock.ANY
+ )
@mock.patch("asyncio.sleep")
@pytest.mark.asyncio
-async def test_wrap_method_with_overriding_retry_and_timeout(unused_sleep):
+async def test_wrap_method_with_overriding_retry_timeout_and_compression(unused_sleep):
fake_call = grpc_helpers_async.FakeUnaryUnaryCall(42)
- method = mock.Mock(spec=aio.UnaryUnaryMultiCallable, side_effect=[
- exceptions.NotFound(None),
- fake_call,
- ])
+ method = mock.Mock(
+ spec=aio.UnaryUnaryMultiCallable,
+ side_effect=[exceptions.NotFound(None), fake_call],
+ )
default_retry = retry_async.AsyncRetry()
default_timeout = timeout.ConstantTimeout(60)
+ default_compression = Compression.Gzip
wrapped_method = gapic_v1.method_async.wrap_method(
- method, default_retry, default_timeout
+ method, default_retry, default_timeout, default_compression
)
result = await wrapped_method(
- retry=retry_async.AsyncRetry(retry_async.if_exception_type(exceptions.NotFound)),
+ retry=retry_async.AsyncRetry(
+ retry_async.if_exception_type(exceptions.NotFound)
+ ),
timeout=timeout.ConstantTimeout(22),
+ compression=Compression.Deflate,
)
assert result == 42
assert method.call_count == 2
- method.assert_called_with(timeout=22, metadata=mock.ANY)
-
-
-@mock.patch("asyncio.sleep")
-@mock.patch(
- "google.api_core.datetime_helpers.utcnow",
- side_effect=_utcnow_monotonic(),
- autospec=True,
-)
-@pytest.mark.asyncio
-async def test_wrap_method_with_overriding_retry_deadline(utcnow, unused_sleep):
- fake_call = grpc_helpers_async.FakeUnaryUnaryCall(42)
- method = mock.Mock(
- spec=aio.UnaryUnaryMultiCallable,
- side_effect=([exceptions.InternalServerError(None)] * 4) + [fake_call])
-
- default_retry = retry_async.AsyncRetry()
- default_timeout = timeout.ExponentialTimeout(deadline=60)
- wrapped_method = gapic_v1.method_async.wrap_method(
- method, default_retry, default_timeout
- )
-
- # Overriding only the retry's deadline should also override the timeout's
- # deadline.
- result = await wrapped_method(retry=default_retry.with_deadline(30))
-
- assert result == 42
- timeout_args = [call[1]["timeout"] for call in method.call_args_list]
- assert timeout_args == [5.0, 10.0, 20.0, 26.0, 25.0]
- assert utcnow.call_count == (
- 1
- + 1 # Compute wait_for timeout in retry_async
- + 5 # First to set the deadline.
- + 5 # One for each min(timeout, maximum, (DEADLINE - NOW).seconds)
+ method.assert_called_with(
+ timeout=22, compression=Compression.Deflate, metadata=mock.ANY
)
@@ -241,3 +257,14 @@ async def test_wrap_method_with_overriding_timeout_as_a_number():
assert result == 42
method.assert_called_once_with(timeout=22, metadata=mock.ANY)
+
+
+@pytest.mark.asyncio
+async def test_wrap_method_without_wrap_errors():
+ fake_call = mock.AsyncMock()
+
+ wrapped_method = gapic_v1.method_async.wrap_method(fake_call, kind="rest")
+ with mock.patch("google.api_core.grpc_helpers_async.wrap_errors") as method:
+ await wrapped_method()
+
+ method.assert_not_called()
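
The reworked tests exercise the compression plumbing: `wrap_method` accepts a `default_compression`, and an explicit per-call value overrides it. A usage sketch mirroring the tests above (it reuses the module's `FakeUnaryUnaryCall` test helper, so treat it as illustrative rather than production code):

```python
# Sketch: wrap-time default_compression vs. per-call override, as asserted
# in the tests above.
import asyncio
from unittest import mock

from grpc import Compression, aio
from google.api_core import gapic_v1, grpc_helpers_async

async def main():
    fake_call = grpc_helpers_async.FakeUnaryUnaryCall(42)
    method = mock.Mock(spec=aio.UnaryUnaryMultiCallable, return_value=fake_call)
    wrapped = gapic_v1.method_async.wrap_method(
        method, default_compression=Compression.Gzip
    )
    # The explicit per-call compression takes precedence over the default.
    assert await wrapped(compression=Compression.Deflate) == 42
    method.assert_called_once_with(metadata=mock.ANY, compression=Compression.Deflate)

asyncio.run(main())
```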
diff --git a/tests/asyncio/operations_v1/test_operations_async_client.py b/tests/asyncio/operations_v1/test_operations_async_client.py
index 0f9363ff..e5b20dcd 100644
--- a/tests/asyncio/operations_v1/test_operations_async_client.py
+++ b/tests/asyncio/operations_v1/test_operations_async_client.py
@@ -12,12 +12,18 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from grpc.experimental import aio
-import mock
+from unittest import mock
+
import pytest
-from google.api_core import (grpc_helpers_async, operations_v1,
- page_iterator_async)
+try:
+ from grpc import aio, Compression
+except ImportError: # pragma: NO COVER
+ pytest.skip("No GRPC", allow_module_level=True)
+
+from google.api_core import grpc_helpers_async
+from google.api_core import operations_v1
+from google.api_core import page_iterator_async
from google.longrunning import operations_pb2
from google.protobuf import empty_pb2
@@ -33,12 +39,20 @@ def _mock_grpc_objects(response):
@pytest.mark.asyncio
async def test_get_operation():
mocked_channel, method, fake_call = _mock_grpc_objects(
- operations_pb2.Operation(name="meep"))
+ operations_pb2.Operation(name="meep")
+ )
client = operations_v1.OperationsAsyncClient(mocked_channel)
- response = await client.get_operation("name")
+ response = await client.get_operation(
+ "name", metadata=[("header", "foo")], compression=Compression.Gzip
+ )
assert method.call_count == 1
assert tuple(method.call_args_list[0])[0][0].name == "name"
+ assert ("header", "foo") in tuple(method.call_args_list[0])[1]["metadata"]
+ assert tuple(method.call_args_list[0])[1]["compression"] == Compression.Gzip
+ assert ("x-goog-request-params", "name=name") in tuple(method.call_args_list[0])[1][
+ "metadata"
+ ]
assert response == fake_call.response
@@ -53,7 +67,9 @@ async def test_list_operations():
mocked_channel, method, fake_call = _mock_grpc_objects(list_response)
client = operations_v1.OperationsAsyncClient(mocked_channel)
- pager = await client.list_operations("name", "filter")
+ pager = await client.list_operations(
+ "name", "filter", metadata=[("header", "foo")], compression=Compression.Gzip
+ )
assert isinstance(pager, page_iterator_async.AsyncIterator)
responses = []
@@ -63,6 +79,11 @@ async def test_list_operations():
assert responses == operations
assert method.call_count == 1
+ assert ("header", "foo") in tuple(method.call_args_list[0])[1]["metadata"]
+ assert tuple(method.call_args_list[0])[1]["compression"] == Compression.Gzip
+ assert ("x-goog-request-params", "name=name") in tuple(method.call_args_list[0])[1][
+ "metadata"
+ ]
request = tuple(method.call_args_list[0])[0][0]
assert isinstance(request, operations_pb2.ListOperationsRequest)
assert request.name == "name"
@@ -71,23 +92,35 @@ async def test_list_operations():
@pytest.mark.asyncio
async def test_delete_operation():
- mocked_channel, method, fake_call = _mock_grpc_objects(
- empty_pb2.Empty())
+ mocked_channel, method, fake_call = _mock_grpc_objects(empty_pb2.Empty())
client = operations_v1.OperationsAsyncClient(mocked_channel)
- await client.delete_operation("name")
+ await client.delete_operation(
+ "name", metadata=[("header", "foo")], compression=Compression.Gzip
+ )
assert method.call_count == 1
assert tuple(method.call_args_list[0])[0][0].name == "name"
+ assert ("header", "foo") in tuple(method.call_args_list[0])[1]["metadata"]
+ assert tuple(method.call_args_list[0])[1]["compression"] == Compression.Gzip
+ assert ("x-goog-request-params", "name=name") in tuple(method.call_args_list[0])[1][
+ "metadata"
+ ]
@pytest.mark.asyncio
async def test_cancel_operation():
- mocked_channel, method, fake_call = _mock_grpc_objects(
- empty_pb2.Empty())
+ mocked_channel, method, fake_call = _mock_grpc_objects(empty_pb2.Empty())
client = operations_v1.OperationsAsyncClient(mocked_channel)
- await client.cancel_operation("name")
+ await client.cancel_operation(
+ "name", metadata=[("header", "foo")], compression=Compression.Gzip
+ )
assert method.call_count == 1
assert tuple(method.call_args_list[0])[0][0].name == "name"
+ assert ("header", "foo") in tuple(method.call_args_list[0])[1]["metadata"]
+ assert tuple(method.call_args_list[0])[1]["compression"] == Compression.Gzip
+ assert ("x-goog-request-params", "name=name") in tuple(method.call_args_list[0])[1][
+ "metadata"
+ ]
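
The new assertions check both the caller-supplied metadata and the `x-goog-request-params` routing header derived from the operation name. For reference, that header tuple can be built with the library's own `routing_header` helper:

```python
# Sketch: the routing-header entry asserted in the tests above.
from google.api_core.gapic_v1 import routing_header

print(routing_header.to_grpc_metadata([("name", "name")]))
# -> ('x-goog-request-params', 'name=name')
```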
diff --git a/tests/asyncio/retry/__init__.py b/tests/asyncio/retry/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/tests/asyncio/retry/test_retry_streaming_async.py b/tests/asyncio/retry/test_retry_streaming_async.py
new file mode 100644
index 00000000..e44f5361
--- /dev/null
+++ b/tests/asyncio/retry/test_retry_streaming_async.py
@@ -0,0 +1,601 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import asyncio
+import datetime
+import re
+
+try:
+ from unittest import mock
+ from unittest.mock import AsyncMock # pragma: NO COVER # noqa: F401
+except ImportError: # pragma: NO COVER
+ import mock # type: ignore
+
+import pytest
+
+from google.api_core import exceptions
+from google.api_core import retry_async
+from google.api_core.retry import retry_streaming_async
+
+from ...unit.retry.test_retry_base import Test_BaseRetry
+
+
+@pytest.mark.asyncio
+async def test_retry_streaming_target_bad_sleep_generator():
+ from google.api_core.retry.retry_streaming_async import retry_target_stream
+
+ with pytest.raises(ValueError, match="Sleep generator"):
+ await retry_target_stream(None, lambda x: True, [], None).__anext__()
+
+
+@mock.patch("asyncio.sleep", autospec=True)
+@pytest.mark.asyncio
+async def test_retry_streaming_target_dynamic_backoff(sleep):
+ """
+ sleep_generator should be iterated after on_error, to support dynamic backoff
+ """
+ from functools import partial
+ from google.api_core.retry.retry_streaming_async import retry_target_stream
+
+ sleep.side_effect = RuntimeError("stop after sleep")
+ # start with empty sleep generator; values are added after exception in push_sleep_value
+ sleep_values = []
+ error_target = partial(TestAsyncStreamingRetry._generator_mock, error_on=0)
+ inserted_sleep = 99
+
+ def push_sleep_value(err):
+ sleep_values.append(inserted_sleep)
+
+ with pytest.raises(RuntimeError):
+ await retry_target_stream(
+ error_target,
+ predicate=lambda x: True,
+ sleep_generator=sleep_values,
+ on_error=push_sleep_value,
+ ).__anext__()
+ assert sleep.call_count == 1
+ sleep.assert_called_once_with(inserted_sleep)
+
+
+class TestAsyncStreamingRetry(Test_BaseRetry):
+ def _make_one(self, *args, **kwargs):
+ return retry_streaming_async.AsyncStreamingRetry(*args, **kwargs)
+
+ def test___str__(self):
+ def if_exception_type(exc):
+ return bool(exc) # pragma: NO COVER
+
+ # Explicitly set all attributes, since changes to Retry defaults should not
+ # cause this test to start failing.
+ retry_ = retry_streaming_async.AsyncStreamingRetry(
+ predicate=if_exception_type,
+ initial=1.0,
+ maximum=60.0,
+ multiplier=2.0,
+ timeout=120.0,
+ on_error=None,
+ )
+ assert re.match(
+ (
+ r", "
+ r"initial=1.0, maximum=60.0, multiplier=2.0, timeout=120.0, "
+ r"on_error=None>"
+ ),
+ str(retry_),
+ )
+
+ @staticmethod
+ async def _generator_mock(
+ num=5,
+ error_on=None,
+ exceptions_seen=None,
+ sleep_time=0,
+ ):
+ """
+ Helper to create a mock generator that yields a number of values
+ Generator can optionally raise an exception on a specific iteration
+
+ Args:
+ - num (int): the number of values to yield
+ - error_on (int): if given, the generator will raise a ValueError on the specified iteration
+ - exceptions_seen (list): if given, the generator will append any exceptions to this list before raising
+ - sleep_time (int): if given, the generator will asyncio.sleep for this many seconds before yielding each value
+ """
+ try:
+ for i in range(num):
+ if sleep_time:
+ await asyncio.sleep(sleep_time)
+ if error_on is not None and i == error_on:
+ raise ValueError("generator mock error")
+ yield i
+ except (Exception, BaseException, GeneratorExit) as e:
+ # keep track of exceptions seen by generator
+ if exceptions_seen is not None:
+ exceptions_seen.append(e)
+ raise
+
+ @mock.patch("asyncio.sleep", autospec=True)
+ @pytest.mark.asyncio
+ async def test___call___generator_success(self, sleep):
+ """
+ Test that a retry-decorated generator yields values as expected
+ This test checks a generator with no issues
+ """
+ from collections.abc import AsyncGenerator
+
+ retry_ = retry_streaming_async.AsyncStreamingRetry()
+ decorated = retry_(self._generator_mock)
+
+ num = 10
+ generator = await decorated(num)
+ # check types
+ assert isinstance(generator, AsyncGenerator)
+ assert isinstance(self._generator_mock(num), AsyncGenerator)
+ # check yield contents
+ unpacked = [i async for i in generator]
+ assert len(unpacked) == num
+ expected = [i async for i in self._generator_mock(num)]
+ for a, b in zip(unpacked, expected):
+ assert a == b
+ sleep.assert_not_called()
+
+ @mock.patch("asyncio.sleep", autospec=True)
+ @pytest.mark.asyncio
+ async def test___call___generator_retry(self, sleep):
+ """
+ Tests that a retry-decorated generator will retry on errors
+ """
+ on_error = mock.Mock(return_value=None)
+ retry_ = retry_streaming_async.AsyncStreamingRetry(
+ on_error=on_error,
+ predicate=retry_async.if_exception_type(ValueError),
+ timeout=None,
+ )
+ generator = await retry_(self._generator_mock)(error_on=3)
+ # error thrown on 3
+ # generator should contain 0, 1, 2 looping
+ unpacked = [await generator.__anext__() for i in range(10)]
+ assert unpacked == [0, 1, 2, 0, 1, 2, 0, 1, 2, 0]
+ assert on_error.call_count == 3
+ await generator.aclose()
+
+ @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n)
+ @mock.patch("asyncio.sleep", autospec=True)
+ @pytest.mark.parametrize("use_deadline_arg", [True, False])
+ @pytest.mark.asyncio
+ async def test___call___generator_retry_hitting_timeout(
+ self, sleep, uniform, use_deadline_arg
+ ):
+ """
+ Tests that a retry-decorated generator will throw a RetryError
+ after using the time budget
+ """
+ import time
+
+ timeout_val = 9.9
+ # support "deadline" as an alias for "timeout"
+ timeout_kwarg = (
+ {"timeout": timeout_val}
+ if not use_deadline_arg
+ else {"deadline": timeout_val}
+ )
+
+ on_error = mock.Mock()
+ retry_ = retry_streaming_async.AsyncStreamingRetry(
+ predicate=retry_async.if_exception_type(ValueError),
+ initial=1.0,
+ maximum=1024.0,
+ multiplier=2.0,
+ **timeout_kwarg,
+ )
+
+ time_now = time.monotonic()
+ now_patcher = mock.patch(
+ "time.monotonic",
+ return_value=time_now,
+ )
+
+ decorated = retry_(self._generator_mock, on_error=on_error)
+ generator = await decorated(error_on=1)
+
+ with now_patcher as patched_now:
+ # Make sure that calls to fake asyncio.sleep() also advance the mocked
+ # time clock.
+ def increase_time(sleep_delay):
+ patched_now.return_value += sleep_delay
+
+ sleep.side_effect = increase_time
+
+ with pytest.raises(exceptions.RetryError):
+ [i async for i in generator]
+
+ assert on_error.call_count == 4
+ # check the delays
+ assert sleep.call_count == 3  # once between each successive target call
+ last_wait = sleep.call_args.args[0]
+ total_wait = sum(call_args.args[0] for call_args in sleep.call_args_list)
+ # next wait would have put us over, so ended early
+ assert last_wait == 4
+ assert total_wait == 7
+
+ @pytest.mark.asyncio
+ async def test___call___generator_cancellations(self):
+ """
+ cancel calls should propagate to the generator
+ """
+ # test without cancel as retryable
+ retry_ = retry_streaming_async.AsyncStreamingRetry()
+ utcnow = datetime.datetime.now(datetime.timezone.utc)
+ mock.patch("google.api_core.datetime_helpers.utcnow", return_value=utcnow)
+ generator = await retry_(self._generator_mock)(sleep_time=0.2)
+ assert await generator.__anext__() == 0
+ task = asyncio.create_task(generator.__anext__())
+ task.cancel()
+ with pytest.raises(asyncio.CancelledError):
+ await task
+ with pytest.raises(StopAsyncIteration):
+ await generator.__anext__()
+
+ @mock.patch("asyncio.sleep", autospec=True)
+ @pytest.mark.asyncio
+ async def test___call___with_generator_send(self, sleep):
+ """
+ Send should be passed through retry into target generator
+ """
+
+ async def _mock_send_gen():
+ """
+ always yield whatever was sent in
+ """
+ in_ = yield
+ while True:
+ in_ = yield in_
+
+ retry_ = retry_streaming_async.AsyncStreamingRetry()
+
+ decorated = retry_(_mock_send_gen)
+
+ generator = await decorated()
+ result = await generator.__anext__()
+ # first yield should be None
+ assert result is None
+ in_messages = ["test_1", "hello", "world"]
+ out_messages = []
+ for msg in in_messages:
+ recv = await generator.asend(msg)
+ out_messages.append(recv)
+ assert in_messages == out_messages
+ await generator.aclose()
+
+ @mock.patch("asyncio.sleep", autospec=True)
+ @pytest.mark.asyncio
+ async def test___call___generator_send_retry(self, sleep):
+ """
+ Send should be retried if target generator raises an error
+ """
+ on_error = mock.Mock(return_value=None)
+ retry_ = retry_streaming_async.AsyncStreamingRetry(
+ on_error=on_error,
+ predicate=retry_async.if_exception_type(ValueError),
+ timeout=None,
+ )
+ generator = await retry_(self._generator_mock)(error_on=3)
+ with pytest.raises(TypeError) as exc_info:
+ await generator.asend("cannot send to fresh generator")
+ assert exc_info.match("can't send non-None value")
+ await generator.aclose()
+
+ # error thrown on 3
+ # generator should contain 0, 1, 2 looping
+ generator = await retry_(self._generator_mock)(error_on=3)
+ assert await generator.__anext__() == 0
+ unpacked = [await generator.asend(i) for i in range(10)]
+ assert unpacked == [1, 2, 0, 1, 2, 0, 1, 2, 0, 1]
+ assert on_error.call_count == 3
+ await generator.aclose()
+
+ @mock.patch("asyncio.sleep", autospec=True)
+ @pytest.mark.asyncio
+ async def test___call___with_generator_close(self, sleep):
+ """
+ Close should be passed through retry into target generator
+ """
+ retry_ = retry_streaming_async.AsyncStreamingRetry()
+ decorated = retry_(self._generator_mock)
+ exception_list = []
+ generator = await decorated(10, exceptions_seen=exception_list)
+ for i in range(2):
+ await generator.__anext__()
+ await generator.aclose()
+
+ assert isinstance(exception_list[0], GeneratorExit)
+ with pytest.raises(StopAsyncIteration):
+ # calling next on closed generator should raise error
+ await generator.__anext__()
+
+ @mock.patch("asyncio.sleep", autospec=True)
+ @pytest.mark.asyncio
+ async def test___call___with_new_generator_close(self, sleep):
+ """
+ Close should be passed through retry into target generator,
+ even when it hasn't been iterated yet
+ """
+ retry_ = retry_streaming_async.AsyncStreamingRetry()
+ decorated = retry_(self._generator_mock)
+ exception_list = []
+ generator = await decorated(10, exceptions_seen=exception_list)
+ await generator.aclose()
+
+ with pytest.raises(StopAsyncIteration):
+ # calling next on closed generator should raise error
+ await generator.__anext__()
+
+ @mock.patch("asyncio.sleep", autospec=True)
+ @pytest.mark.asyncio
+ async def test___call___with_generator_throw(self, sleep):
+ """
+ Throw should be passed through retry into target generator
+ """
+
+ # The generator should not retry when it encounters a non-retryable error
+ retry_ = retry_streaming_async.AsyncStreamingRetry(
+ predicate=retry_async.if_exception_type(ValueError),
+ )
+ decorated = retry_(self._generator_mock)
+ exception_list = []
+ generator = await decorated(10, exceptions_seen=exception_list)
+ for i in range(2):
+ await generator.__anext__()
+ with pytest.raises(BufferError):
+ await generator.athrow(BufferError("test"))
+ assert isinstance(exception_list[0], BufferError)
+ with pytest.raises(StopAsyncIteration):
+ # calling next on closed generator should raise error
+ await generator.__anext__()
+
+ # In contrast, the generator should retry if we throw a retryable exception
+ exception_list = []
+ generator = await decorated(10, exceptions_seen=exception_list)
+ for i in range(2):
+ await generator.__anext__()
+ throw_val = await generator.athrow(ValueError("test"))
+ assert throw_val == 0
+ assert isinstance(exception_list[0], ValueError)
+ # calling next on generator should not raise error, because it was retried
+ assert await generator.__anext__() == 1
+
+ @pytest.mark.parametrize("awaitable_wrapped", [True, False])
+ @mock.patch("asyncio.sleep", autospec=True)
+ @pytest.mark.asyncio
+ async def test___call___with_iterable_send(self, sleep, awaitable_wrapped):
+ """
+ Send should work like next if the wrapped iterable does not support it
+ """
+ retry_ = retry_streaming_async.AsyncStreamingRetry()
+
+ def iterable_fn():
+ class CustomIterable:
+ def __init__(self):
+ self.i = -1
+
+ def __aiter__(self):
+ return self
+
+ async def __anext__(self):
+ self.i += 1
+ return self.i
+
+ return CustomIterable()
+
+ if awaitable_wrapped:
+
+ async def wrapper():
+ return iterable_fn()
+
+ decorated = retry_(wrapper)
+ else:
+ decorated = retry_(iterable_fn)
+
+ retryable = await decorated()
+ # initiate the generator by calling next
+ result = await retryable.__anext__()
+ assert result == 0
+ # test sending values
+ assert await retryable.asend("test") == 1
+ assert await retryable.asend("test2") == 2
+ assert await retryable.asend("test3") == 3
+ await retryable.aclose()
+
+ @pytest.mark.parametrize("awaitable_wrapped", [True, False])
+ @mock.patch("asyncio.sleep", autospec=True)
+ @pytest.mark.asyncio
+ async def test___call___with_iterable_close(self, sleep, awaitable_wrapped):
+ """
+ close should be handled by wrapper if wrapped iterable does not support it
+ """
+ retry_ = retry_streaming_async.AsyncStreamingRetry()
+
+ def iterable_fn():
+ class CustomIterable:
+ def __init__(self):
+ self.i = -1
+
+ def __aiter__(self):
+ return self
+
+ async def __anext__(self):
+ self.i += 1
+ return self.i
+
+ return CustomIterable()
+
+ if awaitable_wrapped:
+
+ async def wrapper():
+ return iterable_fn()
+
+ decorated = retry_(wrapper)
+ else:
+ decorated = retry_(iterable_fn)
+
+ # try closing active generator
+ retryable = await decorated()
+ assert await retryable.__anext__() == 0
+ await retryable.aclose()
+ with pytest.raises(StopAsyncIteration):
+ await retryable.__anext__()
+ # try closing new generator
+ new_retryable = await decorated()
+ await new_retryable.aclose()
+ with pytest.raises(StopAsyncIteration):
+ await new_retryable.__anext__()
+
+ @pytest.mark.parametrize("awaitable_wrapped", [True, False])
+ @mock.patch("asyncio.sleep", autospec=True)
+ @pytest.mark.asyncio
+ async def test___call___with_iterable_throw(self, sleep, awaitable_wrapped):
+ """
+ Throw should work even if the wrapped iterable does not support it
+ """
+
+ predicate = retry_async.if_exception_type(ValueError)
+ retry_ = retry_streaming_async.AsyncStreamingRetry(predicate=predicate)
+
+ def iterable_fn():
+ class CustomIterable:
+ def __init__(self):
+ self.i = -1
+
+ def __aiter__(self):
+ return self
+
+ async def __anext__(self):
+ self.i += 1
+ return self.i
+
+ return CustomIterable()
+
+ if awaitable_wrapped:
+
+ async def wrapper():
+ return iterable_fn()
+
+ decorated = retry_(wrapper)
+ else:
+ decorated = retry_(iterable_fn)
+
+ # try throwing with active generator
+ retryable = await decorated()
+ assert await retryable.__anext__() == 0
+ # should swallow errors in predicate
+ await retryable.athrow(ValueError("test"))
+ # should raise errors not in predicate
+ with pytest.raises(BufferError):
+ await retryable.athrow(BufferError("test"))
+ with pytest.raises(StopAsyncIteration):
+ await retryable.__anext__()
+ # try throwing with new generator
+ new_retryable = await decorated()
+ with pytest.raises(BufferError):
+ await new_retryable.athrow(BufferError("test"))
+ with pytest.raises(StopAsyncIteration):
+ await new_retryable.__anext__()
+
+ @pytest.mark.asyncio
+ async def test_exc_factory_non_retryable_error(self):
+ """
+ generator should give the option to override exception creation logic
+ test when non-retryable error is thrown
+ """
+ from google.api_core.retry import RetryFailureReason
+ from google.api_core.retry.retry_streaming_async import retry_target_stream
+
+ timeout = 6
+ sent_errors = [ValueError("test"), ValueError("test2"), BufferError("test3")]
+ expected_final_err = RuntimeError("done")
+ expected_source_err = ZeroDivisionError("test4")
+
+ def factory(*args, **kwargs):
+ assert len(kwargs) == 0
+ assert args[0] == sent_errors
+ assert args[1] == RetryFailureReason.NON_RETRYABLE_ERROR
+ assert args[2] == timeout
+ return expected_final_err, expected_source_err
+
+ generator = retry_target_stream(
+ self._generator_mock,
+ retry_async.if_exception_type(ValueError),
+ [0] * 3,
+ timeout=timeout,
+ exception_factory=factory,
+ )
+ # initialize the generator
+ await generator.__anext__()
+ # trigger some retryable errors
+ await generator.athrow(sent_errors[0])
+ await generator.athrow(sent_errors[1])
+ # trigger a non-retryable error
+ with pytest.raises(expected_final_err.__class__) as exc_info:
+ await generator.athrow(sent_errors[2])
+ assert exc_info.value == expected_final_err
+ assert exc_info.value.__cause__ == expected_source_err
+
+ @pytest.mark.asyncio
+ async def test_exc_factory_timeout(self):
+ """
+ generator should give the option to override exception creation logic
+ test when timeout is exceeded
+ """
+ import time
+ from google.api_core.retry import RetryFailureReason
+ from google.api_core.retry.retry_streaming_async import retry_target_stream
+
+ timeout = 2
+ time_now = time.monotonic()
+ now_patcher = mock.patch(
+ "time.monotonic",
+ return_value=time_now,
+ )
+
+ with now_patcher as patched_now:
+ timeout = 2
+ sent_errors = [ValueError("test"), ValueError("test2"), ValueError("test3")]
+ expected_final_err = RuntimeError("done")
+ expected_source_err = ZeroDivisionError("test4")
+
+ def factory(*args, **kwargs):
+ assert len(kwargs) == 0
+ assert args[0] == sent_errors
+ assert args[1] == RetryFailureReason.TIMEOUT
+ assert args[2] == timeout
+ return expected_final_err, expected_source_err
+
+ generator = retry_target_stream(
+ self._generator_mock,
+ retry_async.if_exception_type(ValueError),
+ [0] * 3,
+ timeout=timeout,
+ exception_factory=factory,
+ )
+ # initialize the generator
+ await generator.__anext__()
+ # trigger some retryable errors
+ await generator.athrow(sent_errors[0])
+ await generator.athrow(sent_errors[1])
+ # trigger a timeout
+ patched_now.return_value += timeout + 1
+ with pytest.raises(expected_final_err.__class__) as exc_info:
+ await generator.athrow(sent_errors[2])
+ assert exc_info.value == expected_final_err
+ assert exc_info.value.__cause__ == expected_source_err
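
This new module covers `AsyncStreamingRetry`, the async-generator counterpart of `AsyncRetry`. A minimal usage sketch of the decorator pattern the tests rely on; note that the decorated call itself is awaited and returns the retryable generator:

```python
# Minimal usage sketch of AsyncStreamingRetry, as exercised above.
import asyncio

from google.api_core import retry_async
from google.api_core.retry import retry_streaming_async

retry_ = retry_streaming_async.AsyncStreamingRetry(
    predicate=retry_async.if_exception_type(ValueError),
)

@retry_
async def numbers():
    for i in range(3):
        yield i

async def main():
    generator = await numbers()  # awaiting produces the retryable generator
    print([item async for item in generator])  # [0, 1, 2]

asyncio.run(main())
```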
diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/retry/test_retry_unary_async.py
similarity index 61%
rename from tests/asyncio/test_retry_async.py
rename to tests/asyncio/retry/test_retry_unary_async.py
index 8f863668..e7fdc963 100644
--- a/tests/asyncio/test_retry_async.py
+++ b/tests/asyncio/retry/test_retry_unary_async.py
@@ -15,12 +15,18 @@
import datetime
import re
-import mock
+try:
+ from unittest import mock
+ from unittest.mock import AsyncMock # pragma: NO COVER # noqa: F401
+except ImportError: # pragma: NO COVER
+ import mock # type: ignore
import pytest
from google.api_core import exceptions
from google.api_core import retry_async
+from ...unit.retry.test_retry_base import Test_BaseRetry
+
@mock.patch("asyncio.sleep", autospec=True)
@mock.patch(
@@ -66,7 +72,9 @@ async def target():
on_error = mock.Mock()
- result = await retry_async.retry_target(target, predicate, range(10), None, on_error=on_error)
+ result = await retry_async.retry_target(
+ target, predicate, range(10), None, on_error=on_error
+ )
assert result == 42
assert call_count["target"] == 3
@@ -95,139 +103,72 @@ async def test_retry_target_non_retryable_error(utcnow, sleep):
@mock.patch("asyncio.sleep", autospec=True)
-@mock.patch("google.api_core.datetime_helpers.utcnow", autospec=True)
+@mock.patch("time.monotonic", autospec=True)
+@pytest.mark.parametrize("use_deadline_arg", [True, False])
@pytest.mark.asyncio
-async def test_retry_target_deadline_exceeded(utcnow, sleep):
+async def test_retry_target_timeout_exceeded(monotonic, sleep, use_deadline_arg):
predicate = retry_async.if_exception_type(ValueError)
exception = ValueError("meep")
target = mock.Mock(side_effect=exception)
# Setup the timeline so that the first call takes 5 seconds but the second
- # call takes 6, which puts the retry over the deadline.
- utcnow.side_effect = [
- # The first call to utcnow establishes the start of the timeline.
- datetime.datetime.min,
- datetime.datetime.min + datetime.timedelta(seconds=5),
- datetime.datetime.min + datetime.timedelta(seconds=11),
- ]
+ # call takes 6, which puts the retry over the timeout.
+ monotonic.side_effect = [0, 5, 11]
+
+ timeout_val = 10
+ # support "deadline" as an alias for "timeout"
+ timeout_kwarg = (
+ {"timeout": timeout_val} if not use_deadline_arg else {"deadline": timeout_val}
+ )
with pytest.raises(exceptions.RetryError) as exc_info:
- await retry_async.retry_target(target, predicate, range(10), deadline=10)
+ await retry_async.retry_target(target, predicate, range(10), **timeout_kwarg)
assert exc_info.value.cause == exception
- assert exc_info.match("Deadline of 10.0s exceeded")
+ assert exc_info.match("Timeout of 10.0s exceeded")
assert exc_info.match("last exception: meep")
assert target.call_count == 2
+ # Ensure the exception message does not include the target fn:
+ # it may be a partial with user data embedded
+ assert str(target) not in exc_info.exconly()
+
@pytest.mark.asyncio
async def test_retry_target_bad_sleep_generator():
with pytest.raises(ValueError, match="Sleep generator"):
- await retry_async.retry_target(mock.sentinel.target, mock.sentinel.predicate, [], None)
-
+ await retry_async.retry_target(mock.sentinel.target, lambda x: True, [], None)
-class TestAsyncRetry:
-
- def test_constructor_defaults(self):
- retry_ = retry_async.AsyncRetry()
- assert retry_._predicate == retry_async.if_transient_error
- assert retry_._initial == 1
- assert retry_._maximum == 60
- assert retry_._multiplier == 2
- assert retry_._deadline == 120
- assert retry_._on_error is None
-
- def test_constructor_options(self):
- _some_function = mock.Mock()
- retry_ = retry_async.AsyncRetry(
- predicate=mock.sentinel.predicate,
- initial=1,
- maximum=2,
- multiplier=3,
- deadline=4,
- on_error=_some_function,
- )
- assert retry_._predicate == mock.sentinel.predicate
- assert retry_._initial == 1
- assert retry_._maximum == 2
- assert retry_._multiplier == 3
- assert retry_._deadline == 4
- assert retry_._on_error is _some_function
-
- def test_with_deadline(self):
- retry_ = retry_async.AsyncRetry(
- predicate=mock.sentinel.predicate,
- initial=1,
- maximum=2,
- multiplier=3,
- deadline=4,
- on_error=mock.sentinel.on_error,
- )
- new_retry = retry_.with_deadline(42)
- assert retry_ is not new_retry
- assert new_retry._deadline == 42
-
- # the rest of the attributes should remain the same
- assert new_retry._predicate is retry_._predicate
- assert new_retry._initial == retry_._initial
- assert new_retry._maximum == retry_._maximum
- assert new_retry._multiplier == retry_._multiplier
- assert new_retry._on_error is retry_._on_error
-
- def test_with_predicate(self):
- retry_ = retry_async.AsyncRetry(
- predicate=mock.sentinel.predicate,
- initial=1,
- maximum=2,
- multiplier=3,
- deadline=4,
- on_error=mock.sentinel.on_error,
- )
- new_retry = retry_.with_predicate(mock.sentinel.predicate)
- assert retry_ is not new_retry
- assert new_retry._predicate == mock.sentinel.predicate
-
- # the rest of the attributes should remain the same
- assert new_retry._deadline == retry_._deadline
- assert new_retry._initial == retry_._initial
- assert new_retry._maximum == retry_._maximum
- assert new_retry._multiplier == retry_._multiplier
- assert new_retry._on_error is retry_._on_error
-
- def test_with_delay_noop(self):
- retry_ = retry_async.AsyncRetry(
- predicate=mock.sentinel.predicate,
- initial=1,
- maximum=2,
- multiplier=3,
- deadline=4,
- on_error=mock.sentinel.on_error,
+@mock.patch("asyncio.sleep", autospec=True)
+@pytest.mark.asyncio
+async def test_retry_target_dynamic_backoff(sleep):
+ """
+ sleep_generator should be iterated after on_error, to support dynamic backoff
+ """
+ sleep.side_effect = RuntimeError("stop after sleep")
+ # start with empty sleep generator; values are added after exception in push_sleep_value
+ sleep_values = []
+ exception = ValueError("trigger retry")
+ error_target = mock.Mock(side_effect=exception)
+ inserted_sleep = 99
+
+ def push_sleep_value(err):
+ sleep_values.append(inserted_sleep)
+
+ with pytest.raises(RuntimeError):
+ await retry_async.retry_target(
+ error_target,
+ predicate=lambda x: True,
+ sleep_generator=sleep_values,
+ on_error=push_sleep_value,
)
- new_retry = retry_.with_delay()
- assert retry_ is not new_retry
- assert new_retry._initial == retry_._initial
- assert new_retry._maximum == retry_._maximum
- assert new_retry._multiplier == retry_._multiplier
+ assert sleep.call_count == 1
+ sleep.assert_called_once_with(inserted_sleep)
- def test_with_delay(self):
- retry_ = retry_async.AsyncRetry(
- predicate=mock.sentinel.predicate,
- initial=1,
- maximum=2,
- multiplier=3,
- deadline=4,
- on_error=mock.sentinel.on_error,
- )
- new_retry = retry_.with_delay(initial=1, maximum=2, multiplier=3)
- assert retry_ is not new_retry
- assert new_retry._initial == 1
- assert new_retry._maximum == 2
- assert new_retry._multiplier == 3
- # the rest of the attributes should remain the same
- assert new_retry._deadline == retry_._deadline
- assert new_retry._predicate is retry_._predicate
- assert new_retry._on_error is retry_._on_error
+class TestAsyncRetry(Test_BaseRetry):
+ def _make_one(self, *args, **kwargs):
+ return retry_async.AsyncRetry(*args, **kwargs)
def test___str__(self):
def if_exception_type(exc):
@@ -240,13 +181,13 @@ def if_exception_type(exc):
initial=1.0,
maximum=60.0,
multiplier=2.0,
- deadline=120.0,
+ timeout=120.0,
on_error=None,
)
assert re.match(
(
r", "
- r"initial=1.0, maximum=60.0, multiplier=2.0, deadline=120.0, "
+ r"initial=1.0, maximum=60.0, multiplier=2.0, timeout=120.0, "
r"on_error=None>"
),
str(retry_),
@@ -269,14 +210,14 @@ async def test___call___and_execute_success(self, sleep):
target.assert_called_once_with("meep")
sleep.assert_not_called()
- # Make uniform return half of its maximum, which is the calculated sleep time.
- @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n / 2.0)
+ @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n)
@mock.patch("asyncio.sleep", autospec=True)
@pytest.mark.asyncio
async def test___call___and_execute_retry(self, sleep, uniform):
-
on_error = mock.Mock(spec=["__call__"], side_effect=[None])
- retry_ = retry_async.AsyncRetry(predicate=retry_async.if_exception_type(ValueError))
+ retry_ = retry_async.AsyncRetry(
+ predicate=retry_async.if_exception_type(ValueError)
+ )
target = mock.AsyncMock(spec=["__call__"], side_effect=[ValueError(), 42])
# __name__ is needed by functools.partial.
@@ -293,25 +234,20 @@ async def test___call___and_execute_retry(self, sleep, uniform):
sleep.assert_called_once_with(retry_._initial)
assert on_error.call_count == 1
- # Make uniform return half of its maximum, which is the calculated sleep time.
- @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n / 2.0)
+ @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n)
@mock.patch("asyncio.sleep", autospec=True)
@pytest.mark.asyncio
- async def test___call___and_execute_retry_hitting_deadline(self, sleep, uniform):
-
+ async def test___call___and_execute_retry_hitting_timeout(self, sleep, uniform):
on_error = mock.Mock(spec=["__call__"], side_effect=[None] * 10)
retry_ = retry_async.AsyncRetry(
predicate=retry_async.if_exception_type(ValueError),
initial=1.0,
maximum=1024.0,
multiplier=2.0,
- deadline=9.9,
+ timeout=30.9,
)
- utcnow = datetime.datetime.utcnow()
- utcnow_patcher = mock.patch(
- "google.api_core.datetime_helpers.utcnow", return_value=utcnow
- )
+ monotonic_patcher = mock.patch("time.monotonic", return_value=0)
target = mock.AsyncMock(spec=["__call__"], side_effect=[ValueError()] * 10)
# __name__ is needed by functools.partial.
@@ -320,11 +256,12 @@ async def test___call___and_execute_retry_hitting_deadline(self, sleep, uniform)
decorated = retry_(target, on_error=on_error)
target.assert_not_called()
- with utcnow_patcher as patched_utcnow:
+ with monotonic_patcher as patched_monotonic:
# Make sure that calls to fake asyncio.sleep() also advance the mocked
# time clock.
def increase_time(sleep_delay):
- patched_utcnow.return_value += datetime.timedelta(seconds=sleep_delay)
+ patched_monotonic.return_value += sleep_delay
+
sleep.side_effect = increase_time
with pytest.raises(exceptions.RetryError):
@@ -339,8 +276,17 @@ def increase_time(sleep_delay):
last_wait = sleep.call_args.args[0]
total_wait = sum(call_args.args[0] for call_args in sleep.call_args_list)
- assert last_wait == 2.9 # and not 8.0, because the last delay was shortened
- assert total_wait == 9.9 # the same as the deadline
+ assert last_wait == 8.0
+ # Next attempt would be scheduled in 16 secs, 15 + 16 = 31 > 30.9, thus
+ # we do not even wait for it to be scheduled (30.9 is the configured timeout).
+ # This changes the previous logic of shortening the last attempt to fit
+ # in the timeout. The previous logic was removed to make Python retry
+ # logic consistent with the other languages and to avoid disrupting the
+ # randomized retry delay distribution by artificially increasing the
+ # probability of scheduling two (instead of one) final attempts with a
+ # very short delay between them, when the second attempt would have had
+ # very little chance of succeeding anyway.
+ assert total_wait == 15.0
@mock.patch("asyncio.sleep", autospec=True)
@pytest.mark.asyncio
@@ -366,8 +312,7 @@ async def test___init___without_retry_executed(self, sleep):
sleep.assert_not_called()
_some_function.assert_not_called()
- # Make uniform return half of its maximum, which is the calculated sleep time.
- @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n / 2.0)
+ @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n)
@mock.patch("asyncio.sleep", autospec=True)
@pytest.mark.asyncio
async def test___init___when_retry_is_executed(self, sleep, uniform):
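
The rewritten timing test encodes the budget rule spelled out in the comment above: sleep the full exponential delays and stop once the next delay would overshoot the timeout, with no shortened final sleep. A deterministic sketch of that arithmetic (jitter disabled; the real generator randomizes each delay):

```python
# Deterministic illustration of the timeout arithmetic in the test above:
# initial=1, multiplier=2, timeout=30.9 -> sleep 1+2+4+8 = 15s; the next
# 16s delay would overshoot the budget, so retrying stops there.
delay, budget = 1.0, 30.9
slept, waits = 0.0, []
while slept + delay <= budget:
    waits.append(delay)
    slept += delay
    delay = min(delay * 2.0, 1024.0)
print(waits, slept)  # [1.0, 2.0, 4.0, 8.0] 15.0
```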
diff --git a/tests/asyncio/test_grpc_helpers_async.py b/tests/asyncio/test_grpc_helpers_async.py
index 924a74ca..aa8d5d10 100644
--- a/tests/asyncio/test_grpc_helpers_async.py
+++ b/tests/asyncio/test_grpc_helpers_async.py
@@ -12,10 +12,23 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import grpc
-from grpc.experimental import aio
-import mock
-import pytest
+try:
+ from unittest import mock
+ from unittest.mock import AsyncMock # pragma: NO COVER # noqa: F401
+except ImportError: # pragma: NO COVER
+ import mock # type: ignore
+import pytest # noqa: I202
+
+try:
+ import grpc
+ from grpc import aio
+except ImportError: # pragma: NO COVER
+ grpc = aio = None
+
+
+if grpc is None: # pragma: NO COVER
+ pytest.skip("No GRPC", allow_module_level=True)
+
from google.api_core import exceptions
from google.api_core import grpc_helpers_async
@@ -33,6 +46,9 @@ def code(self):
def details(self):
return None
+ def trailing_metadata(self):
+ return None
+
@pytest.mark.asyncio
async def test_wrap_unary_errors():
@@ -85,12 +101,40 @@ async def test_common_methods_in_wrapped_call():
assert mock_call.wait_for_connection.call_count == 1
+@pytest.mark.asyncio
+@pytest.mark.parametrize(
+ "callable_type,expected_wrapper_type",
+ [
+ (grpc.aio.UnaryStreamMultiCallable, grpc_helpers_async._WrappedUnaryStreamCall),
+ (grpc.aio.StreamUnaryMultiCallable, grpc_helpers_async._WrappedStreamUnaryCall),
+ (
+ grpc.aio.StreamStreamMultiCallable,
+ grpc_helpers_async._WrappedStreamStreamCall,
+ ),
+ ],
+)
+async def test_wrap_errors_w_stream_type(callable_type, expected_wrapper_type):
+ class ConcreteMulticallable(callable_type):
+ def __call__(self, *args, **kwargs):
+ raise NotImplementedError("Should not be called")
+
+ with mock.patch.object(
+ grpc_helpers_async, "_wrap_stream_errors"
+ ) as wrap_stream_errors:
+ callable_ = ConcreteMulticallable()
+ grpc_helpers_async.wrap_errors(callable_)
+ assert wrap_stream_errors.call_count == 1
+ wrap_stream_errors.assert_called_once_with(callable_, expected_wrapper_type)
+
+
@pytest.mark.asyncio
async def test_wrap_stream_errors_unary_stream():
mock_call = mock.Mock(aio.UnaryStreamCall, autospec=True)
multicallable = mock.Mock(return_value=mock_call)
- wrapped_callable = grpc_helpers_async._wrap_stream_errors(multicallable)
+ wrapped_callable = grpc_helpers_async._wrap_stream_errors(
+ multicallable, grpc_helpers_async._WrappedUnaryStreamCall
+ )
await wrapped_callable(1, 2, three="four")
multicallable.assert_called_once_with(1, 2, three="four")
@@ -102,7 +146,9 @@ async def test_wrap_stream_errors_stream_unary():
mock_call = mock.Mock(aio.StreamUnaryCall, autospec=True)
multicallable = mock.Mock(return_value=mock_call)
- wrapped_callable = grpc_helpers_async._wrap_stream_errors(multicallable)
+ wrapped_callable = grpc_helpers_async._wrap_stream_errors(
+ multicallable, grpc_helpers_async._WrappedStreamUnaryCall
+ )
await wrapped_callable(1, 2, three="four")
multicallable.assert_called_once_with(1, 2, three="four")
@@ -114,24 +160,15 @@ async def test_wrap_stream_errors_stream_stream():
mock_call = mock.Mock(aio.StreamStreamCall, autospec=True)
multicallable = mock.Mock(return_value=mock_call)
- wrapped_callable = grpc_helpers_async._wrap_stream_errors(multicallable)
+ wrapped_callable = grpc_helpers_async._wrap_stream_errors(
+ multicallable, grpc_helpers_async._WrappedStreamStreamCall
+ )
await wrapped_callable(1, 2, three="four")
multicallable.assert_called_once_with(1, 2, three="four")
assert mock_call.wait_for_connection.call_count == 1
-@pytest.mark.asyncio
-async def test_wrap_stream_errors_type_error():
- mock_call = mock.Mock()
- multicallable = mock.Mock(return_value=mock_call)
-
- wrapped_callable = grpc_helpers_async._wrap_stream_errors(multicallable)
-
- with pytest.raises(TypeError):
- await wrapped_callable()
-
-
@pytest.mark.asyncio
async def test_wrap_stream_errors_raised():
grpc_error = RpcErrorImpl(grpc.StatusCode.INVALID_ARGUMENT)
@@ -139,7 +176,9 @@ async def test_wrap_stream_errors_raised():
mock_call.wait_for_connection = mock.AsyncMock(side_effect=[grpc_error])
multicallable = mock.Mock(return_value=mock_call)
- wrapped_callable = grpc_helpers_async._wrap_stream_errors(multicallable)
+ wrapped_callable = grpc_helpers_async._wrap_stream_errors(
+ multicallable, grpc_helpers_async._WrappedStreamStreamCall
+ )
with pytest.raises(exceptions.InvalidArgument):
await wrapped_callable()
@@ -154,7 +193,9 @@ async def test_wrap_stream_errors_read():
mock_call.read = mock.AsyncMock(side_effect=grpc_error)
multicallable = mock.Mock(return_value=mock_call)
- wrapped_callable = grpc_helpers_async._wrap_stream_errors(multicallable)
+ wrapped_callable = grpc_helpers_async._wrap_stream_errors(
+ multicallable, grpc_helpers_async._WrappedStreamStreamCall
+ )
wrapped_call = await wrapped_callable(1, 2, three="four")
multicallable.assert_called_once_with(1, 2, three="four")
@@ -170,12 +211,16 @@ async def test_wrap_stream_errors_aiter():
grpc_error = RpcErrorImpl(grpc.StatusCode.INVALID_ARGUMENT)
mock_call = mock.Mock(aio.StreamStreamCall, autospec=True)
- mocked_aiter = mock.Mock(spec=['__anext__'])
- mocked_aiter.__anext__ = mock.AsyncMock(side_effect=[mock.sentinel.response, grpc_error])
+ mocked_aiter = mock.Mock(spec=["__anext__"])
+ mocked_aiter.__anext__ = mock.AsyncMock(
+ side_effect=[mock.sentinel.response, grpc_error]
+ )
mock_call.__aiter__ = mock.Mock(return_value=mocked_aiter)
multicallable = mock.Mock(return_value=mock_call)
- wrapped_callable = grpc_helpers_async._wrap_stream_errors(multicallable)
+ wrapped_callable = grpc_helpers_async._wrap_stream_errors(
+ multicallable, grpc_helpers_async._WrappedStreamStreamCall
+ )
wrapped_call = await wrapped_callable()
with pytest.raises(exceptions.InvalidArgument) as exc_info:
@@ -186,15 +231,19 @@ async def test_wrap_stream_errors_aiter():
@pytest.mark.asyncio
async def test_wrap_stream_errors_aiter_non_rpc_error():
- non_grpc_error = TypeError('Not a gRPC error')
+ non_grpc_error = TypeError("Not a gRPC error")
mock_call = mock.Mock(aio.StreamStreamCall, autospec=True)
- mocked_aiter = mock.Mock(spec=['__anext__'])
- mocked_aiter.__anext__ = mock.AsyncMock(side_effect=[mock.sentinel.response, non_grpc_error])
+ mocked_aiter = mock.Mock(spec=["__anext__"])
+ mocked_aiter.__anext__ = mock.AsyncMock(
+ side_effect=[mock.sentinel.response, non_grpc_error]
+ )
mock_call.__aiter__ = mock.Mock(return_value=mocked_aiter)
multicallable = mock.Mock(return_value=mock_call)
- wrapped_callable = grpc_helpers_async._wrap_stream_errors(multicallable)
+ wrapped_callable = grpc_helpers_async._wrap_stream_errors(
+ multicallable, grpc_helpers_async._WrappedStreamStreamCall
+ )
wrapped_call = await wrapped_callable()
with pytest.raises(TypeError) as exc_info:
@@ -208,7 +257,9 @@ async def test_wrap_stream_errors_aiter_called_multiple_times():
mock_call = mock.Mock(aio.StreamStreamCall, autospec=True)
multicallable = mock.Mock(return_value=mock_call)
- wrapped_callable = grpc_helpers_async._wrap_stream_errors(multicallable)
+ wrapped_callable = grpc_helpers_async._wrap_stream_errors(
+ multicallable, grpc_helpers_async._WrappedStreamStreamCall
+ )
wrapped_call = await wrapped_callable()
assert wrapped_call.__aiter__() == wrapped_call.__aiter__()
@@ -223,7 +274,9 @@ async def test_wrap_stream_errors_write():
mock_call.done_writing = mock.AsyncMock(side_effect=[None, grpc_error])
multicallable = mock.Mock(return_value=mock_call)
- wrapped_callable = grpc_helpers_async._wrap_stream_errors(multicallable)
+ wrapped_callable = grpc_helpers_async._wrap_stream_errors(
+ multicallable, grpc_helpers_async._WrappedStreamStreamCall
+ )
wrapped_call = await wrapped_callable()
@@ -250,6 +303,28 @@ def test_wrap_errors_non_streaming(wrap_unary_errors):
wrap_unary_errors.assert_called_once_with(callable_)
+def test_grpc_async_stream():
+ """
+ GrpcAsyncStream type should be both an AsyncIterator and a grpc.aio.Call.
+ """
+ instance = grpc_helpers_async.GrpcAsyncStream[int]()
+ assert isinstance(instance, grpc.aio.Call)
+ # should implement __aiter__ and __anext__
+ assert hasattr(instance, "__aiter__")
+ it = instance.__aiter__()
+ assert hasattr(it, "__anext__")
+
+
+def test_awaitable_grpc_call():
+ """
+ AwaitableGrpcCall type should be an Awaitable and a grpc.aio.Call.
+ """
+ instance = grpc_helpers_async.AwaitableGrpcCall()
+ assert isinstance(instance, grpc.aio.Call)
+ # should implement __await__
+ assert hasattr(instance, "__await__")
+
+
@mock.patch("google.api_core.grpc_helpers_async._wrap_stream_errors")
def test_wrap_errors_streaming(wrap_stream_errors):
callable_ = mock.create_autospec(aio.UnaryStreamMultiCallable)
@@ -257,53 +332,151 @@ def test_wrap_errors_streaming(wrap_stream_errors):
result = grpc_helpers_async.wrap_errors(callable_)
assert result == wrap_stream_errors.return_value
- wrap_stream_errors.assert_called_once_with(callable_)
+ wrap_stream_errors.assert_called_once_with(
+ callable_, grpc_helpers_async._WrappedUnaryStreamCall
+ )
-@mock.patch("grpc.composite_channel_credentials")
+@pytest.mark.parametrize(
+ "attempt_direct_path,target,expected_target",
+ [
+ (None, "example.com:443", "example.com:443"),
+ (False, "example.com:443", "example.com:443"),
+ (True, "example.com:443", "google-c2p:///example.com"),
+ (True, "dns:///example.com", "google-c2p:///example.com"),
+ (True, "another-c2p:///example.com", "another-c2p:///example.com"),
+ ],
+)
+@mock.patch("grpc.compute_engine_channel_credentials")
@mock.patch(
"google.auth.default",
- return_value=(mock.sentinel.credentials, mock.sentinel.projet),
+ autospec=True,
+ return_value=(mock.sentinel.credentials, mock.sentinel.project),
)
-@mock.patch("grpc.experimental.aio.secure_channel")
-def test_create_channel_implicit(grpc_secure_channel, default, composite_creds_call):
- target = "example.com:443"
+@mock.patch("grpc.aio.secure_channel")
+def test_create_channel_implicit(
+ grpc_secure_channel,
+ google_auth_default,
+ composite_creds_call,
+ attempt_direct_path,
+ target,
+ expected_target,
+):
composite_creds = composite_creds_call.return_value
- channel = grpc_helpers_async.create_channel(target)
+ channel = grpc_helpers_async.create_channel(
+ target, attempt_direct_path=attempt_direct_path
+ )
assert channel is grpc_secure_channel.return_value
- default.assert_called_once_with(scopes=None)
- grpc_secure_channel.assert_called_once_with(target, composite_creds)
+ google_auth_default.assert_called_once_with(scopes=None, default_scopes=None)
+ grpc_secure_channel.assert_called_once_with(
+ expected_target, composite_creds, compression=None
+ )
+
+
+@pytest.mark.parametrize(
+ "attempt_direct_path,target, expected_target",
+ [
+ (None, "example.com:443", "example.com:443"),
+ (False, "example.com:443", "example.com:443"),
+ (True, "example.com:443", "google-c2p:///example.com"),
+ (True, "dns:///example.com", "google-c2p:///example.com"),
+ (True, "another-c2p:///example.com", "another-c2p:///example.com"),
+ ],
+)
+@mock.patch("google.auth.transport.grpc.AuthMetadataPlugin", autospec=True)
+@mock.patch(
+ "google.auth.transport.requests.Request",
+ autospec=True,
+ return_value=mock.sentinel.Request,
+)
+@mock.patch("grpc.compute_engine_channel_credentials")
+@mock.patch(
+ "google.auth.default",
+ autospec=True,
+ return_value=(mock.sentinel.credentials, mock.sentinel.project),
+)
+@mock.patch("grpc.aio.secure_channel")
+def test_create_channel_implicit_with_default_host(
+ grpc_secure_channel,
+ google_auth_default,
+ composite_creds_call,
+ request,
+ auth_metadata_plugin,
+ attempt_direct_path,
+ target,
+ expected_target,
+):
+ default_host = "example.com"
+ composite_creds = composite_creds_call.return_value
+ channel = grpc_helpers_async.create_channel(
+ target, default_host=default_host, attempt_direct_path=attempt_direct_path
+ )
+
+ assert channel is grpc_secure_channel.return_value
+
+ google_auth_default.assert_called_once_with(scopes=None, default_scopes=None)
+ auth_metadata_plugin.assert_called_once_with(
+ mock.sentinel.credentials, mock.sentinel.Request, default_host=default_host
+ )
+ grpc_secure_channel.assert_called_once_with(
+ expected_target, composite_creds, compression=None
+ )
+
+
+@pytest.mark.parametrize(
+ "attempt_direct_path",
+ [
+ None,
+ False,
+ ],
+)
@mock.patch("grpc.composite_channel_credentials")
@mock.patch(
"google.auth.default",
- return_value=(mock.sentinel.credentials, mock.sentinel.projet),
+ return_value=(mock.sentinel.credentials, mock.sentinel.project),
)
-@mock.patch("grpc.experimental.aio.secure_channel")
+@mock.patch("grpc.aio.secure_channel")
def test_create_channel_implicit_with_ssl_creds(
- grpc_secure_channel, default, composite_creds_call
+ grpc_secure_channel, default, composite_creds_call, attempt_direct_path
):
target = "example.com:443"
ssl_creds = grpc.ssl_channel_credentials()
- grpc_helpers_async.create_channel(target, ssl_credentials=ssl_creds)
+ grpc_helpers_async.create_channel(
+ target, ssl_credentials=ssl_creds, attempt_direct_path=attempt_direct_path
+ )
- default.assert_called_once_with(scopes=None)
+ default.assert_called_once_with(scopes=None, default_scopes=None)
composite_creds_call.assert_called_once_with(ssl_creds, mock.ANY)
composite_creds = composite_creds_call.return_value
- grpc_secure_channel.assert_called_once_with(target, composite_creds)
+ grpc_secure_channel.assert_called_once_with(
+ target, composite_creds, compression=None
+ )
-@mock.patch("grpc.composite_channel_credentials")
+def test_create_channel_implicit_with_ssl_creds_attempt_direct_path_true():
+ target = "example.com:443"
+ ssl_creds = grpc.ssl_channel_credentials()
+ with pytest.raises(
+ ValueError, match="Using ssl_credentials with Direct Path is not supported"
+ ):
+ grpc_helpers_async.create_channel(
+ target, ssl_credentials=ssl_creds, attempt_direct_path=True
+ )
+
+
+@mock.patch("grpc.compute_engine_channel_credentials")
@mock.patch(
"google.auth.default",
- return_value=(mock.sentinel.credentials, mock.sentinel.projet),
+ autospec=True,
+ return_value=(mock.sentinel.credentials, mock.sentinel.project),
)
-@mock.patch("grpc.experimental.aio.secure_channel")
+@mock.patch("grpc.aio.secure_channel")
def test_create_channel_implicit_with_scopes(
grpc_secure_channel, default, composite_creds_call
):
@@ -313,8 +486,36 @@ def test_create_channel_implicit_with_scopes(
channel = grpc_helpers_async.create_channel(target, scopes=["one", "two"])
assert channel is grpc_secure_channel.return_value
- default.assert_called_once_with(scopes=["one", "two"])
- grpc_secure_channel.assert_called_once_with(target, composite_creds)
+
+ default.assert_called_once_with(scopes=["one", "two"], default_scopes=None)
+ grpc_secure_channel.assert_called_once_with(
+ target, composite_creds, compression=None
+ )
+
+
+@mock.patch("grpc.compute_engine_channel_credentials")
+@mock.patch(
+ "google.auth.default",
+ autospec=True,
+ return_value=(mock.sentinel.credentials, mock.sentinel.project),
+)
+@mock.patch("grpc.aio.secure_channel")
+def test_create_channel_implicit_with_default_scopes(
+ grpc_secure_channel, default, composite_creds_call
+):
+ target = "example.com:443"
+ composite_creds = composite_creds_call.return_value
+
+ channel = grpc_helpers_async.create_channel(
+ target, default_scopes=["three", "four"], compression=grpc.Compression.Gzip
+ )
+
+ assert channel is grpc_secure_channel.return_value
+
+ default.assert_called_once_with(scopes=None, default_scopes=["three", "four"])
+ grpc_secure_channel.assert_called_once_with(
+ target, composite_creds, compression=grpc.Compression.Gzip
+ )
def test_create_channel_explicit_with_duplicate_credentials():
@@ -324,28 +525,34 @@ def test_create_channel_explicit_with_duplicate_credentials():
grpc_helpers_async.create_channel(
target,
credentials_file="credentials.json",
- credentials=mock.sentinel.credentials
+ credentials=mock.sentinel.credentials,
)
assert "mutually exclusive" in str(excinfo.value)
-@mock.patch("grpc.composite_channel_credentials")
-@mock.patch("google.auth.credentials.with_scopes_if_required")
-@mock.patch("grpc.experimental.aio.secure_channel")
+@mock.patch("grpc.compute_engine_channel_credentials")
+@mock.patch("google.auth.credentials.with_scopes_if_required", autospec=True)
+@mock.patch("grpc.aio.secure_channel")
def test_create_channel_explicit(grpc_secure_channel, auth_creds, composite_creds_call):
target = "example.com:443"
composite_creds = composite_creds_call.return_value
- channel = grpc_helpers_async.create_channel(target, credentials=mock.sentinel.credentials)
+ channel = grpc_helpers_async.create_channel(
+ target, credentials=mock.sentinel.credentials, compression=grpc.Compression.Gzip
+ )
- auth_creds.assert_called_once_with(mock.sentinel.credentials, None)
+ auth_creds.assert_called_once_with(
+ mock.sentinel.credentials, scopes=None, default_scopes=None
+ )
assert channel is grpc_secure_channel.return_value
- grpc_secure_channel.assert_called_once_with(target, composite_creds)
+ grpc_secure_channel.assert_called_once_with(
+ target, composite_creds, compression=grpc.Compression.Gzip
+ )
-@mock.patch("grpc.composite_channel_credentials")
-@mock.patch("grpc.experimental.aio.secure_channel")
+@mock.patch("grpc.compute_engine_channel_credentials")
+@mock.patch("grpc.aio.secure_channel")
def test_create_channel_explicit_scoped(grpc_secure_channel, composite_creds_call):
target = "example.com:443"
scopes = ["1", "2"]
@@ -355,21 +562,58 @@ def test_create_channel_explicit_scoped(grpc_secure_channel, composite_creds_cal
credentials.requires_scopes = True
channel = grpc_helpers_async.create_channel(
- target, credentials=credentials, scopes=scopes
+ target,
+ credentials=credentials,
+ scopes=scopes,
+ compression=grpc.Compression.Gzip,
)
- credentials.with_scopes.assert_called_once_with(scopes)
+ credentials.with_scopes.assert_called_once_with(scopes, default_scopes=None)
assert channel is grpc_secure_channel.return_value
- grpc_secure_channel.assert_called_once_with(target, composite_creds)
+ grpc_secure_channel.assert_called_once_with(
+ target, composite_creds, compression=grpc.Compression.Gzip
+ )
-@mock.patch("grpc.composite_channel_credentials")
-@mock.patch("grpc.experimental.aio.secure_channel")
-def test_create_channel_explicit_with_quota_project(grpc_secure_channel, composite_creds_call):
+@mock.patch("grpc.compute_engine_channel_credentials")
+@mock.patch("grpc.aio.secure_channel")
+def test_create_channel_explicit_default_scopes(
+ grpc_secure_channel, composite_creds_call
+):
+ target = "example.com:443"
+ default_scopes = ["3", "4"]
+ composite_creds = composite_creds_call.return_value
+
+ credentials = mock.create_autospec(google.auth.credentials.Scoped, instance=True)
+ credentials.requires_scopes = True
+
+ channel = grpc_helpers_async.create_channel(
+ target,
+ credentials=credentials,
+ default_scopes=default_scopes,
+ compression=grpc.Compression.Gzip,
+ )
+
+ credentials.with_scopes.assert_called_once_with(
+ scopes=None, default_scopes=default_scopes
+ )
+ assert channel is grpc_secure_channel.return_value
+ grpc_secure_channel.assert_called_once_with(
+ target, composite_creds, compression=grpc.Compression.Gzip
+ )
+
+
+@mock.patch("grpc.compute_engine_channel_credentials")
+@mock.patch("grpc.aio.secure_channel")
+def test_create_channel_explicit_with_quota_project(
+ grpc_secure_channel, composite_creds_call
+):
target = "example.com:443"
composite_creds = composite_creds_call.return_value
- credentials = mock.create_autospec(google.auth.credentials.Credentials, instance=True)
+ credentials = mock.create_autospec(
+ google.auth.credentials.CredentialsWithQuotaProject, instance=True
+ )
channel = grpc_helpers_async.create_channel(
target, credentials=credentials, quota_project_id="project-foo"
@@ -377,16 +621,21 @@ def test_create_channel_explicit_with_quota_project(grpc_secure_channel, composi
credentials.with_quota_project.assert_called_once_with("project-foo")
assert channel is grpc_secure_channel.return_value
- grpc_secure_channel.assert_called_once_with(target, composite_creds)
+ grpc_secure_channel.assert_called_once_with(
+ target, composite_creds, compression=None
+ )
-@mock.patch("grpc.composite_channel_credentials")
-@mock.patch("grpc.experimental.aio.secure_channel")
+@mock.patch("grpc.compute_engine_channel_credentials")
+@mock.patch("grpc.aio.secure_channel")
@mock.patch(
"google.auth.load_credentials_from_file",
- return_value=(mock.sentinel.credentials, mock.sentinel.project)
+ autospec=True,
+ return_value=(mock.sentinel.credentials, mock.sentinel.project),
)
-def test_create_channnel_with_credentials_file(load_credentials_from_file, grpc_secure_channel, composite_creds_call):
+def test_create_channel_with_credentials_file(
+ load_credentials_from_file, grpc_secure_channel, composite_creds_call
+):
target = "example.com:443"
credentials_file = "/path/to/credentials/file.json"
@@ -396,18 +645,25 @@ def test_create_channnel_with_credentials_file(load_credentials_from_file, grpc_
target, credentials_file=credentials_file
)
- google.auth.load_credentials_from_file.assert_called_once_with(credentials_file, scopes=None)
+ google.auth.load_credentials_from_file.assert_called_once_with(
+ credentials_file, scopes=None, default_scopes=None
+ )
assert channel is grpc_secure_channel.return_value
- grpc_secure_channel.assert_called_once_with(target, composite_creds)
+ grpc_secure_channel.assert_called_once_with(
+ target, composite_creds, compression=None
+ )
-@mock.patch("grpc.composite_channel_credentials")
-@mock.patch("grpc.experimental.aio.secure_channel")
+@mock.patch("grpc.compute_engine_channel_credentials")
+@mock.patch("grpc.aio.secure_channel")
@mock.patch(
"google.auth.load_credentials_from_file",
- return_value=(mock.sentinel.credentials, mock.sentinel.project)
+ autospec=True,
+ return_value=(mock.sentinel.credentials, mock.sentinel.project),
)
-def test_create_channel_with_credentials_file_and_scopes(load_credentials_from_file, grpc_secure_channel, composite_creds_call):
+def test_create_channel_with_credentials_file_and_scopes(
+ load_credentials_from_file, grpc_secure_channel, composite_creds_call
+):
target = "example.com:443"
scopes = ["1", "2"]
@@ -418,14 +674,46 @@ def test_create_channel_with_credentials_file_and_scopes(load_credentials_from_f
target, credentials_file=credentials_file, scopes=scopes
)
- google.auth.load_credentials_from_file.assert_called_once_with(credentials_file, scopes=scopes)
+ google.auth.load_credentials_from_file.assert_called_once_with(
+ credentials_file, scopes=scopes, default_scopes=None
+ )
assert channel is grpc_secure_channel.return_value
- grpc_secure_channel.assert_called_once_with(target, composite_creds)
+ grpc_secure_channel.assert_called_once_with(
+ target, composite_creds, compression=None
+ )
+
+
+@mock.patch("grpc.compute_engine_channel_credentials")
+@mock.patch("grpc.aio.secure_channel")
+@mock.patch(
+ "google.auth.load_credentials_from_file",
+ autospec=True,
+ return_value=(mock.sentinel.credentials, mock.sentinel.project),
+)
+def test_create_channel_with_credentials_file_and_default_scopes(
+ load_credentials_from_file, grpc_secure_channel, composite_creds_call
+):
+ target = "example.com:443"
+ default_scopes = ["3", "4"]
+
+ credentials_file = "/path/to/credentials/file.json"
+ composite_creds = composite_creds_call.return_value
+
+ channel = grpc_helpers_async.create_channel(
+ target, credentials_file=credentials_file, default_scopes=default_scopes
+ )
+
+ google.auth.load_credentials_from_file.assert_called_once_with(
+ credentials_file, scopes=None, default_scopes=default_scopes
+ )
+ assert channel is grpc_secure_channel.return_value
+ grpc_secure_channel.assert_called_once_with(
+ target, composite_creds, compression=None
+ )
-@pytest.mark.skipif(grpc_helpers_async.HAS_GRPC_GCP, reason="grpc_gcp module not available")
-@mock.patch("grpc.experimental.aio.secure_channel")
-def test_create_channel_without_grpc_gcp(grpc_secure_channel):
+@mock.patch("grpc.aio.secure_channel")
+def test_create_channel(grpc_secure_channel):
target = "example.com:443"
scopes = ["test_scope"]
@@ -434,7 +722,7 @@ def test_create_channel_without_grpc_gcp(grpc_secure_channel):
grpc_helpers_async.create_channel(target, credentials=credentials, scopes=scopes)
grpc_secure_channel.assert_called()
- credentials.with_scopes.assert_called_once_with(scopes)
+ credentials.with_scopes.assert_called_once_with(scopes, default_scopes=None)
@pytest.mark.asyncio
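
The direct-path parameters above encode a target rewrite: with
`attempt_direct_path=True`, a bare host or a `dns:///` target is rewritten to the
`google-c2p` resolver, while a target that already names another scheme is passed
through untouched. A hedged sketch of that mapping (mirroring only what the test
parameters assert, not the helper's actual code):

    def _maybe_rewrite_target(target: str, attempt_direct_path: bool) -> str:
        """Illustrative: reproduce the expected_target column of the tests above."""
        if not attempt_direct_path:
            return target
        if target.startswith("dns:///"):
            target = target[len("dns:///"):]
        elif "://" in target:
            # Already has a non-DNS scheme (e.g. another-c2p:///...): leave as-is.
            return target
        host = target.split(":")[0]  # direct path targets carry no port
        return "google-c2p:///{}".format(host)
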
diff --git a/tests/asyncio/test_operation_async.py b/tests/asyncio/test_operation_async.py
index 419749f3..9d9fb5d2 100644
--- a/tests/asyncio/test_operation_async.py
+++ b/tests/asyncio/test_operation_async.py
@@ -13,9 +13,19 @@
# limitations under the License.
-import mock
import pytest
+try:
+ from unittest import mock
+ from unittest.mock import AsyncMock # pragma: NO COVER # noqa: F401
+except ImportError: # pragma: NO COVER
+ import mock # type: ignore
+
+try:
+ import grpc # noqa: F401
+except ImportError: # pragma: NO COVER
+ pytest.skip("No GRPC", allow_module_level=True)
+
from google.api_core import exceptions
from google.api_core import operation_async
from google.api_core import operations_v1
@@ -74,6 +84,7 @@ async def test_constructor():
assert await future.running()
+@pytest.mark.asyncio
def test_metadata():
expected_metadata = struct_pb2.Struct()
future, _, _ = make_operation_future(
@@ -166,6 +177,7 @@ async def test_unexpected_result(unused_sleep):
assert "Unexpected state" in "{!r}".format(exception)
+@pytest.mark.asyncio
def test_from_gapic():
operation_proto = make_operation_proto(done=True)
operations_client = mock.create_autospec(
@@ -177,12 +189,15 @@ def test_from_gapic():
operations_client,
struct_pb2.Struct,
metadata_type=struct_pb2.Struct,
+ grpc_metadata=[("x-goog-request-params", "foo")],
)
assert future._result_type == struct_pb2.Struct
assert future._metadata_type == struct_pb2.Struct
assert future.operation.name == TEST_OPERATION_NAME
assert future.done
+ assert future._refresh.keywords["metadata"] == [("x-goog-request-params", "foo")]
+ assert future._cancel.keywords["metadata"] == [("x-goog-request-params", "foo")]
def test_deserialize():
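
The new `grpc_metadata` assertions peek at `future._refresh.keywords`, which implies
`from_gapic` binds the metadata into the refresh and cancel callables with
`functools.partial`. In spirit (a sketch under that assumption; names are
illustrative):

    import functools

    def bind_operation_calls(operations_client, operation_name, grpc_metadata=None):
        """Illustrative: bind per-call metadata so every later poll re-sends it."""
        refresh = functools.partial(
            operations_client.get_operation, operation_name, metadata=grpc_metadata
        )
        cancel = functools.partial(
            operations_client.cancel_operation, operation_name, metadata=grpc_metadata
        )
        return refresh, cancel

    # refresh.keywords["metadata"] then equals the metadata passed in, which is
    # exactly what test_from_gapic reads back via future._refresh.keywords.
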
diff --git a/tests/asyncio/test_page_iterator_async.py b/tests/asyncio/test_page_iterator_async.py
index 42fac2a2..63e26d02 100644
--- a/tests/asyncio/test_page_iterator_async.py
+++ b/tests/asyncio/test_page_iterator_async.py
@@ -14,20 +14,22 @@
import inspect
-import mock
+try:
+ from unittest import mock
+ from unittest.mock import AsyncMock # pragma: NO COVER # noqa: F401
+except ImportError: # pragma: NO COVER
+ import mock # type: ignore
import pytest
from google.api_core import page_iterator_async
class PageAsyncIteratorImpl(page_iterator_async.AsyncIterator):
-
async def _next_page(self):
return mock.create_autospec(page_iterator_async.Page, instance=True)
class TestAsyncIterator:
-
def test_constructor(self):
client = mock.sentinel.client
item_to_value = mock.sentinel.item_to_value
@@ -47,6 +49,35 @@ def test_constructor(self):
assert iterator.next_page_token == token
assert iterator.num_results == 0
+ @pytest.mark.asyncio
+ async def test_anext(self):
+ parent = mock.sentinel.parent
+ page_1 = page_iterator_async.Page(
+ parent,
+ ("item 1.1", "item 1.2"),
+ page_iterator_async._item_to_value_identity,
+ )
+ page_2 = page_iterator_async.Page(
+ parent, ("item 2.1",), page_iterator_async._item_to_value_identity
+ )
+
+ async_iterator = PageAsyncIteratorImpl(None, None)
+ async_iterator._next_page = mock.AsyncMock(side_effect=[page_1, page_2, None])
+
+ # Consume items and check the state of the async_iterator.
+ assert async_iterator.num_results == 0
+ assert await async_iterator.__anext__() == "item 1.1"
+ assert async_iterator.num_results == 1
+
+ assert await async_iterator.__anext__() == "item 1.2"
+ assert async_iterator.num_results == 2
+
+ assert await async_iterator.__anext__() == "item 2.1"
+ assert async_iterator.num_results == 3
+
+ with pytest.raises(StopAsyncIteration):
+ await async_iterator.__anext__()
+
def test_pages_property_starts(self):
iterator = PageAsyncIteratorImpl(None, None)
@@ -69,7 +100,8 @@ def test_pages_property_restart(self):
async def test__page_aiter_increment(self):
iterator = PageAsyncIteratorImpl(None, None)
page = page_iterator_async.Page(
- iterator, ("item",), page_iterator_async._item_to_value_identity)
+ iterator, ("item",), page_iterator_async._item_to_value_identity
+ )
iterator._next_page = mock.AsyncMock(side_effect=[page, None])
assert iterator.num_results == 0
@@ -78,6 +110,7 @@ async def test__page_aiter_increment(self):
await page_aiter.__anext__()
assert iterator.num_results == 1
+ await page_aiter.aclose()
@pytest.mark.asyncio
async def test__page_aiter_no_increment(self):
@@ -90,6 +123,7 @@ async def test__page_aiter_no_increment(self):
# results should still be 0 after fetching a page.
assert iterator.num_results == 0
+ await page_aiter.aclose()
@pytest.mark.asyncio
async def test__items_aiter(self):
@@ -101,9 +135,11 @@ async def test__items_aiter(self):
# Make pages from mock responses
parent = mock.sentinel.parent
page1 = page_iterator_async.Page(
- parent, (item1, item2), page_iterator_async._item_to_value_identity)
+ parent, (item1, item2), page_iterator_async._item_to_value_identity
+ )
page2 = page_iterator_async.Page(
- parent, (item3,), page_iterator_async._item_to_value_identity)
+ parent, (item3,), page_iterator_async._item_to_value_identity
+ )
iterator = PageAsyncIteratorImpl(None, None)
iterator._next_page = mock.AsyncMock(side_effect=[page1, page2, None])
@@ -160,7 +196,6 @@ def test___aiter___restart_after_page(self):
class TestAsyncGRPCIterator(object):
-
def test_constructor(self):
client = mock.sentinel.client
items_field = "items"
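
`test_anext` above pins down the iterator contract: items are drained page by page,
`num_results` counts items rather than pages, and exhaustion raises
StopAsyncIteration. A self-contained sketch of that contract (not the library class):

    class TinyAsyncPageIterator:
        """Illustrative: flatten pages of items into a single async iterator."""

        def __init__(self, pages):
            self._pages = iter(pages)  # each page is a plain sequence of items
            self._items = iter(())
            self.num_results = 0

        def __aiter__(self):
            return self

        async def __anext__(self):
            while True:
                try:
                    item = next(self._items)
                except StopIteration:
                    page = next(self._pages, None)
                    if page is None:
                        raise StopAsyncIteration
                    self._items = iter(page)
                    continue
                self.num_results += 1
                return item
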
diff --git a/tests/asyncio/test_rest_streaming_async.py b/tests/asyncio/test_rest_streaming_async.py
new file mode 100644
index 00000000..c9caa2b1
--- /dev/null
+++ b/tests/asyncio/test_rest_streaming_async.py
@@ -0,0 +1,378 @@
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# TODO: set random.seed explicitly in each test function.
+# See related issue: https://github.com/googleapis/python-api-core/issues/689.
+
+import datetime
+import logging
+import random
+import time
+from typing import List, AsyncIterator
+
+try:
+ from unittest import mock
+ from unittest.mock import AsyncMock # pragma: NO COVER # noqa: F401
+except ImportError: # pragma: NO COVER
+ import mock # type: ignore
+
+import pytest # noqa: I202
+
+import proto
+
+try:
+ from google.auth.aio.transport import Response
+except ImportError:
+ pytest.skip(
+ "google-api-core[async_rest] is required to test asynchronous rest streaming.",
+ allow_module_level=True,
+ )
+
+from google.api_core import rest_streaming_async
+from google.api import http_pb2
+from google.api import httpbody_pb2
+
+
+from ..helpers import Composer, Song, EchoResponse, parse_responses
+
+
+__protobuf__ = proto.module(package=__name__)
+SEED = int(time.time())
+logging.info(f"Starting async rest streaming tests with random seed: {SEED}")
+random.seed(SEED)
+
+
+async def mock_async_gen(data, chunk_size=1):
+ for i in range(0, len(data), chunk_size): # pragma: NO COVER
+ chunk = data[i : i + chunk_size]
+ yield chunk.encode("utf-8")
+
+
+class ResponseMock(Response):
+ class _ResponseItr(AsyncIterator[bytes]):
+ def __init__(self, _response_bytes: bytes, random_split=False):
+ self._responses_bytes = _response_bytes
+ self._idx = 0
+ self._random_split = random_split
+
+ def __aiter__(self):
+ return self
+
+ async def __anext__(self):
+ if self._idx >= len(self._responses_bytes):
+ raise StopAsyncIteration
+ if self._random_split:
+ n = random.randint(1, len(self._responses_bytes[self._idx :]))
+ else:
+ n = 1
+ x = self._responses_bytes[self._idx : self._idx + n]
+ self._idx += n
+ return x
+
+ def __init__(
+ self,
+ responses: List[proto.Message],
+ response_cls,
+ random_split=False,
+ ):
+ self._responses = responses
+ self._random_split = random_split
+ self._response_message_cls = response_cls
+
+ def _parse_responses(self):
+ return parse_responses(self._response_message_cls, self._responses)
+
+ @property
+ async def headers(self):
+ raise NotImplementedError()
+
+ @property
+ async def status_code(self):
+ raise NotImplementedError()
+
+ async def close(self):
+ raise NotImplementedError()
+
+ async def content(self, chunk_size=None):
+ itr = self._ResponseItr(
+ self._parse_responses(), random_split=self._random_split
+ )
+ async for chunk in itr:
+ yield chunk
+
+ async def read(self):
+ raise NotImplementedError()
+
+
+@pytest.mark.asyncio
+@pytest.mark.parametrize(
+ "random_split,resp_message_is_proto_plus",
+ [(False, True), (False, False)],
+)
+async def test_next_simple(random_split, resp_message_is_proto_plus):
+ if resp_message_is_proto_plus:
+ response_type = EchoResponse
+ responses = [EchoResponse(content="hello world"), EchoResponse(content="yes")]
+ else:
+ response_type = httpbody_pb2.HttpBody
+ responses = [
+ httpbody_pb2.HttpBody(content_type="hello world"),
+ httpbody_pb2.HttpBody(content_type="yes"),
+ ]
+
+ resp = ResponseMock(
+ responses=responses, random_split=random_split, response_cls=response_type
+ )
+ itr = rest_streaming_async.AsyncResponseIterator(resp, response_type)
+ idx = 0
+ async for response in itr:
+ assert response == responses[idx]
+ idx += 1
+
+
+@pytest.mark.asyncio
+@pytest.mark.parametrize(
+ "random_split,resp_message_is_proto_plus",
+ [
+ (True, True),
+ (False, True),
+ (True, False),
+ (False, False),
+ ],
+)
+async def test_next_nested(random_split, resp_message_is_proto_plus):
+ if resp_message_is_proto_plus:
+ response_type = Song
+ responses = [
+ Song(title="some song", composer=Composer(given_name="some name")),
+ Song(title="another song", date_added=datetime.datetime(2021, 12, 17)),
+ ]
+ else:
+ # Although `http_pb2.HttpRule` is used in the response, any response message
+ # with a nested field meets the criteria for this test.
+ response_type = http_pb2.HttpRule
+ responses = [
+ http_pb2.HttpRule(
+ selector="some selector",
+ custom=http_pb2.CustomHttpPattern(kind="some kind"),
+ ),
+ http_pb2.HttpRule(
+ selector="another selector",
+ custom=http_pb2.CustomHttpPattern(path="some path"),
+ ),
+ ]
+ resp = ResponseMock(
+ responses=responses, random_split=random_split, response_cls=response_type
+ )
+ itr = rest_streaming_async.AsyncResponseIterator(resp, response_type)
+ idx = 0
+ async for response in itr:
+ assert response == responses[idx]
+ idx += 1
+ assert idx == len(responses)
+
+
+@pytest.mark.asyncio
+@pytest.mark.parametrize(
+ "random_split,resp_message_is_proto_plus",
+ [
+ (True, True),
+ (False, True),
+ (True, False),
+ (False, False),
+ ],
+)
+async def test_next_stress(random_split, resp_message_is_proto_plus):
+ n = 50
+ if resp_message_is_proto_plus:
+ response_type = Song
+ responses = [
+ Song(title="title_%d" % i, composer=Composer(given_name="name_%d" % i))
+ for i in range(n)
+ ]
+ else:
+ response_type = http_pb2.HttpRule
+ responses = [
+ http_pb2.HttpRule(
+ selector="selector_%d" % i,
+ custom=http_pb2.CustomHttpPattern(path="path_%d" % i),
+ )
+ for i in range(n)
+ ]
+ resp = ResponseMock(
+ responses=responses, random_split=random_split, response_cls=response_type
+ )
+ itr = rest_streaming_async.AsyncResponseIterator(resp, response_type)
+ idx = 0
+ async for response in itr:
+ assert response == responses[idx]
+ idx += 1
+ assert idx == n
+
+
+@pytest.mark.asyncio
+@pytest.mark.parametrize(
+ "random_split,resp_message_is_proto_plus",
+ [
+ (True, True),
+ (False, True),
+ (True, False),
+ (False, False),
+ ],
+)
+async def test_next_escaped_characters_in_string(
+ random_split, resp_message_is_proto_plus
+):
+ if resp_message_is_proto_plus:
+ response_type = Song
+ composer_with_relateds = Composer()
+ relateds = ["Artist A", "Artist B"]
+ composer_with_relateds.relateds = relateds
+
+ responses = [
+ Song(
+ title='ti"tle\nfoo\tbar{}', composer=Composer(given_name="name\n\n\n")
+ ),
+ Song(
+ title='{"this is weird": "totally"}',
+ composer=Composer(given_name="\\{}\\"),
+ ),
+ Song(title='\\{"key": ["value",]}\\', composer=composer_with_relateds),
+ ]
+ else:
+ response_type = http_pb2.Http
+ responses = [
+ http_pb2.Http(
+ rules=[
+ http_pb2.HttpRule(
+ selector='ti"tle\nfoo\tbar{}',
+ custom=http_pb2.CustomHttpPattern(kind="name\n\n\n"),
+ )
+ ]
+ ),
+ http_pb2.Http(
+ rules=[
+ http_pb2.HttpRule(
+ selector='{"this is weird": "totally"}',
+ custom=http_pb2.CustomHttpPattern(kind="\\{}\\"),
+ )
+ ]
+ ),
+ http_pb2.Http(
+ rules=[
+ http_pb2.HttpRule(
+ selector='\\{"key": ["value",]}\\',
+ custom=http_pb2.CustomHttpPattern(kind="\\{}\\"),
+ )
+ ]
+ ),
+ ]
+ resp = ResponseMock(
+ responses=responses, random_split=random_split, response_cls=response_type
+ )
+ itr = rest_streaming_async.AsyncResponseIterator(resp, response_type)
+ idx = 0
+ async for response in itr:
+ assert response == responses[idx]
+ idx += 1
+ assert idx == len(responses)
+
+
+@pytest.mark.asyncio
+@pytest.mark.parametrize("response_type", [EchoResponse, httpbody_pb2.HttpBody])
+async def test_next_not_array(response_type):
+
+ data = '{"hello": 0}'
+ with mock.patch.object(
+ ResponseMock, "content", return_value=mock_async_gen(data)
+ ) as mock_method:
+ resp = ResponseMock(responses=[], response_cls=response_type)
+ itr = rest_streaming_async.AsyncResponseIterator(resp, response_type)
+ with pytest.raises(ValueError):
+ await itr.__anext__()
+ mock_method.assert_called_once()
+
+
+@pytest.mark.asyncio
+@pytest.mark.parametrize("response_type", [EchoResponse, httpbody_pb2.HttpBody])
+async def test_cancel(response_type):
+ with mock.patch.object(
+ ResponseMock, "close", new_callable=mock.AsyncMock
+ ) as mock_method:
+ resp = ResponseMock(responses=[], response_cls=response_type)
+ itr = rest_streaming_async.AsyncResponseIterator(resp, response_type)
+ await itr.cancel()
+ mock_method.assert_called_once()
+
+
+@pytest.mark.asyncio
+@pytest.mark.parametrize("response_type", [EchoResponse, httpbody_pb2.HttpBody])
+async def test_iterator_as_context_manager(response_type):
+ with mock.patch.object(
+ ResponseMock, "close", new_callable=mock.AsyncMock
+ ) as mock_method:
+ resp = ResponseMock(responses=[], response_cls=response_type)
+ async with rest_streaming_async.AsyncResponseIterator(resp, response_type):
+ pass
+ mock_method.assert_called_once()
+
+
+@pytest.mark.asyncio
+@pytest.mark.parametrize(
+ "response_type,return_value",
+ [
+ (EchoResponse, bytes('[{"content": "hello"}, {', "utf-8")),
+ (httpbody_pb2.HttpBody, bytes('[{"content_type": "hello"}, {', "utf-8")),
+ ],
+)
+async def test_check_buffer(response_type, return_value):
+ with mock.patch.object(
+ ResponseMock,
+ "_parse_responses",
+ return_value=return_value,
+ ):
+ resp = ResponseMock(responses=[], response_cls=response_type)
+ itr = rest_streaming_async.AsyncResponseIterator(resp, response_type)
+ with pytest.raises(ValueError):
+ await itr.__anext__()
+ await itr.__anext__()
+
+
+@pytest.mark.asyncio
+@pytest.mark.parametrize("response_type", [EchoResponse, httpbody_pb2.HttpBody])
+async def test_next_html(response_type):
+
+ data = ""
+ with mock.patch.object(
+ ResponseMock, "content", return_value=mock_async_gen(data)
+ ) as mock_method:
+ resp = ResponseMock(responses=[], response_cls=response_type)
+
+ itr = rest_streaming_async.AsyncResponseIterator(resp, response_type)
+ with pytest.raises(ValueError):
+ await itr.__anext__()
+ mock_method.assert_called_once()
+
+
+@pytest.mark.asyncio
+async def test_invalid_response_class():
+ class SomeClass:
+ pass
+
+ resp = ResponseMock(responses=[], response_cls=SomeClass)
+ with pytest.raises(
+ ValueError,
+ match="Response message class must be a subclass of proto.Message or google.protobuf.message.Message",
+ ):
+ rest_streaming_async.AsyncResponseIterator(resp, SomeClass)
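
The streaming tests above feed a JSON array in arbitrary byte chunks and expect
complete messages back; a bare object or an HTML page must raise ValueError. A
simplified sketch of the reassembly those tests imply, tracking quoted strings and
escapes (which is why titles like '{"this is weird": "totally"}' appear above); the
real iterator is more elaborate:

    import json

    def reassemble(chunks):
        """Illustrative: yield objects from a byte-chunked JSON array of objects."""
        buf, depth, in_string, escaped, started = "", 0, False, False, False
        for chunk in chunks:
            for ch in chunk.decode("utf-8"):
                if not started:
                    if ch == "[":
                        started = True
                        continue
                    if ch.isspace():
                        continue
                    raise ValueError("stream does not start with a JSON array")
                if depth == 0 and not in_string and ch in ",] \t\r\n":
                    continue
                buf += ch
                if in_string:
                    if escaped:
                        escaped = False
                    elif ch == "\\":
                        escaped = True
                    elif ch == '"':
                        in_string = False
                elif ch == '"':
                    in_string = True
                elif ch == "{":
                    depth += 1
                elif ch == "}":
                    depth -= 1
                    if depth == 0:
                        yield json.loads(buf)
                        buf = ""
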
diff --git a/tests/helpers.py b/tests/helpers.py
new file mode 100644
index 00000000..3429d511
--- /dev/null
+++ b/tests/helpers.py
@@ -0,0 +1,71 @@
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helpers for tests"""
+
+import logging
+from typing import List
+
+import proto
+
+from google.protobuf import duration_pb2
+from google.protobuf import timestamp_pb2
+from google.protobuf.json_format import MessageToJson
+
+
+class Genre(proto.Enum):
+ GENRE_UNSPECIFIED = 0
+ CLASSICAL = 1
+ JAZZ = 2
+ ROCK = 3
+
+
+class Composer(proto.Message):
+ given_name = proto.Field(proto.STRING, number=1)
+ family_name = proto.Field(proto.STRING, number=2)
+ relateds = proto.RepeatedField(proto.STRING, number=3)
+ indices = proto.MapField(proto.STRING, proto.STRING, number=4)
+
+
+class Song(proto.Message):
+ composer = proto.Field(Composer, number=1)
+ title = proto.Field(proto.STRING, number=2)
+ lyrics = proto.Field(proto.STRING, number=3)
+ year = proto.Field(proto.INT32, number=4)
+ genre = proto.Field(Genre, number=5)
+ is_five_mins_longer = proto.Field(proto.BOOL, number=6)
+ score = proto.Field(proto.DOUBLE, number=7)
+ likes = proto.Field(proto.INT64, number=8)
+ duration = proto.Field(duration_pb2.Duration, number=9)
+ date_added = proto.Field(timestamp_pb2.Timestamp, number=10)
+
+
+class EchoResponse(proto.Message):
+ content = proto.Field(proto.STRING, number=1)
+
+
+def parse_responses(response_message_cls, all_responses: List[proto.Message]) -> bytes:
+ # The serialized JSON for a message may come back surrounded with quotes,
+ # which need to be stripped so the joined result is valid JSON.
+ json_responses = [
+ (
+ response_message_cls.to_json(response).strip('"')
+ if issubclass(response_message_cls, proto.Message)
+ else MessageToJson(response).strip('"')
+ )
+ for response in all_responses
+ ]
+ logging.info(f"Sending JSON stream: {json_responses}")
+ ret_val = "[{}]".format(",".join(json_responses))
+ return bytes(ret_val, "utf-8")
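
A quick usage sketch of the helper above (using the message classes defined in this
file; the exact byte layout depends on the serializer's formatting):

    data = parse_responses(
        EchoResponse, [EchoResponse(content="a"), EchoResponse(content="b")]
    )
    assert data.startswith(b"[") and data.endswith(b"]")  # a JSON array of messages
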
diff --git a/tests/unit/future/test__helpers.py b/tests/unit/future/test__helpers.py
index 98afc599..a37efdd4 100644
--- a/tests/unit/future/test__helpers.py
+++ b/tests/unit/future/test__helpers.py
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import mock
+from unittest import mock
from google.api_core.future import _helpers
diff --git a/tests/unit/future/test_polling.py b/tests/unit/future/test_polling.py
index c67de064..2f66f230 100644
--- a/tests/unit/future/test_polling.py
+++ b/tests/unit/future/test_polling.py
@@ -15,16 +15,16 @@
import concurrent.futures
import threading
import time
+from unittest import mock
-import mock
import pytest
-from google.api_core import exceptions
+from google.api_core import exceptions, retry
from google.api_core.future import polling
class PollingFutureImpl(polling.PollingFuture):
- def done(self):
+ def done(self, retry=None):
return False
def cancel(self):
@@ -33,9 +33,6 @@ def cancel(self):
def cancelled(self):
return False
- def running(self):
- return True
-
def test_polling_future_constructor():
future = PollingFutureImpl()
@@ -43,6 +40,8 @@ def test_polling_future_constructor():
assert not future.cancelled()
assert future.running()
assert future.cancel()
+ with mock.patch.object(future, "done", return_value=True):
+ future.result()
def test_set_result():
@@ -82,20 +81,23 @@ def test_invoke_callback_exception():
class PollingFutureImplWithPoll(PollingFutureImpl):
- def __init__(self):
+ def __init__(self, max_poll_count=1):
super(PollingFutureImplWithPoll, self).__init__()
self.poll_count = 0
self.event = threading.Event()
+ self.max_poll_count = max_poll_count
- def done(self):
+ def done(self, retry=None):
self.poll_count += 1
+ if self.max_poll_count > self.poll_count:
+ return False
self.event.wait()
self.set_result(42)
return True
-def test_result_with_polling():
- future = PollingFutureImplWithPoll()
+def test_result_with_one_polling():
+ future = PollingFutureImplWithPoll(max_poll_count=1)
future.event.set()
result = future.result()
@@ -107,8 +109,34 @@ def test_result_with_polling():
assert future.poll_count == 1
+def test_result_with_two_pollings():
+ future = PollingFutureImplWithPoll(max_poll_count=2)
+
+ future.event.set()
+ result = future.result()
+
+ assert result == 42
+ assert future.poll_count == 2
+ # Repeated calls should not cause additional polling
+ assert future.result() == result
+ assert future.poll_count == 2
+
+
+def test_result_with_two_pollings_custom_retry():
+ future = PollingFutureImplWithPoll(max_poll_count=2)
+
+ future.event.set()
+ custom_retry = retry.Retry(
+ predicate=retry.if_exception_type(exceptions.TooManyRequests)
+ )
+ result = future.result(retry=custom_retry)
+
+ assert result == 42
+ assert future.poll_count == 2
+ # Repeated calls should not cause additional polling
+ assert future.result() == result
+ assert future.poll_count == 2
+
+
class PollingFutureImplTimeout(PollingFutureImplWithPoll):
- def done(self):
+ def done(self, retry=None):
time.sleep(1)
return False
@@ -130,11 +158,11 @@ def __init__(self, errors):
super(PollingFutureImplTransient, self).__init__()
self._errors = errors
- def done(self):
+ def done(self, retry=None):
+ self.poll_count += 1
if self._errors:
error, self._errors = self._errors[0], self._errors[1:]
raise error("testing")
- self.poll_count += 1
self.set_result(42)
return True
@@ -142,17 +170,17 @@ def done(self):
def test_result_transient_error():
future = PollingFutureImplTransient(
(
- exceptions.TooManyRequests,
- exceptions.InternalServerError,
- exceptions.BadGateway,
+ polling._OperationNotComplete,
+ polling._OperationNotComplete,
+ polling._OperationNotComplete,
)
)
result = future.result()
assert result == 42
- assert future.poll_count == 1
+ assert future.poll_count == 4
# Repeated calls should not cause additional polling
assert future.result() == result
- assert future.poll_count == 1
+ assert future.poll_count == 4
def test_callback_background_thread():
@@ -192,3 +220,49 @@ def test_double_callback_background_thread():
assert future.poll_count == 1
callback.assert_called_once_with(future)
callback2.assert_called_once_with(future)
+
+
+class PollingFutureImplWithoutRetry(PollingFutureImpl):
+ def done(self, retry=None):
+ return True
+
+ def result(self, timeout=None, retry=None, polling=None):
+ return super(PollingFutureImplWithoutRetry, self).result()
+
+ def _blocking_poll(self, timeout=None, retry=None, polling=None):
+ return super(PollingFutureImplWithoutRetry, self)._blocking_poll(
+ timeout=timeout
+ )
+
+
+class PollingFutureImplWith_done_or_raise(PollingFutureImpl):
+ def done(self, retry=None):
+ return True
+
+ def _done_or_raise(self, retry=None):
+ return super(PollingFutureImplWith_done_or_raise, self)._done_or_raise()
+
+
+def test_polling_future_without_retry():
+ custom_retry = retry.Retry(
+ predicate=retry.if_exception_type(exceptions.TooManyRequests)
+ )
+ future = PollingFutureImplWithoutRetry()
+ assert future.done()
+ assert not future.running()
+ assert future.result() is None
+
+ with mock.patch.object(future, "done") as done_mock:
+ future._done_or_raise()
+ done_mock.assert_called_once_with(retry=None)
+
+ with mock.patch.object(future, "done") as done_mock:
+ future._done_or_raise(retry=custom_retry)
+ done_mock.assert_called_once_with(retry=custom_retry)
+
+
+def test_polling_future_with__done_or_raise():
+ future = PollingFutureImplWith_done_or_raise()
+ assert future.done()
+ assert not future.running()
+ assert future.result() is None
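
With the changes above, every call to done() is counted, including the three that
raise polling._OperationNotComplete, so test_result_transient_error now observes four
polls. The blocking poll simply retries until done() stops signalling "not yet", in
the spirit of this sketch (not the actual implementation):

    class OperationNotComplete(Exception):
        """Illustrative stand-in for polling._OperationNotComplete."""

    def blocking_poll(done_or_raise, max_attempts=5):
        """Retry until done_or_raise() stops raising OperationNotComplete."""
        for _ in range(max_attempts):
            try:
                done_or_raise()
                return
            except OperationNotComplete:
                continue  # the real loop sleeps per the retry policy first
        raise TimeoutError("operation did not complete")

    # Three raising attempts followed by one success gives a poll count of 4,
    # matching the updated assertions.
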
diff --git a/tests/unit/gapic/test_client_info.py b/tests/unit/gapic/test_client_info.py
index 64080ffd..2ca5c404 100644
--- a/tests/unit/gapic/test_client_info.py
+++ b/tests/unit/gapic/test_client_info.py
@@ -12,6 +12,13 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import pytest
+
+try:
+ import grpc # noqa: F401
+except ImportError:
+ pytest.skip("No GRPC", allow_module_level=True)
+
from google.api_core.gapic_v1 import client_info
diff --git a/tests/unit/gapic/test_config.py b/tests/unit/gapic/test_config.py
index 1c15261d..5e42fde8 100644
--- a/tests/unit/gapic/test_config.py
+++ b/tests/unit/gapic/test_config.py
@@ -12,6 +12,13 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import pytest
+
+try:
+ import grpc # noqa: F401
+except ImportError:
+ pytest.skip("No GRPC", allow_module_level=True)
+
from google.api_core import exceptions
from google.api_core.gapic_v1 import config
diff --git a/tests/unit/gapic/test_method.py b/tests/unit/gapic/test_method.py
index 1ae27de0..8896429c 100644
--- a/tests/unit/gapic/test_method.py
+++ b/tests/unit/gapic/test_method.py
@@ -13,8 +13,15 @@
# limitations under the License.
import datetime
+from unittest import mock
+
+import pytest
+
+try:
+ import grpc # noqa: F401
+except ImportError:
+ pytest.skip("No GRPC", allow_module_level=True)
-import mock
from google.api_core import exceptions
from google.api_core import retry
@@ -32,27 +39,6 @@ def _utcnow_monotonic():
curr_value += delta
-def test__determine_timeout():
- # Check _determine_timeout always returns a Timeout object.
- timeout_type_timeout = timeout.ConstantTimeout(600.0)
- returned_timeout = google.api_core.gapic_v1.method._determine_timeout(
- 600.0, 600.0, None
- )
- assert isinstance(returned_timeout, timeout.ConstantTimeout)
- returned_timeout = google.api_core.gapic_v1.method._determine_timeout(
- 600.0, timeout_type_timeout, None
- )
- assert isinstance(returned_timeout, timeout.ConstantTimeout)
- returned_timeout = google.api_core.gapic_v1.method._determine_timeout(
- timeout_type_timeout, 600.0, None
- )
- assert isinstance(returned_timeout, timeout.ConstantTimeout)
- returned_timeout = google.api_core.gapic_v1.method._determine_timeout(
- timeout_type_timeout, timeout_type_timeout, None
- )
- assert isinstance(returned_timeout, timeout.ConstantTimeout)
-
-
def test_wrap_method_basic():
method = mock.Mock(spec=["__call__"], return_value=42)
@@ -90,6 +76,7 @@ def test_wrap_method_with_custom_client_info():
api_core_version=3,
gapic_version=4,
client_library_version=5,
+ protobuf_runtime_version=6,
)
method = mock.Mock(spec=["__call__"])
@@ -135,92 +122,71 @@ def test_invoke_wrapped_method_with_metadata_as_none():
@mock.patch("time.sleep")
-def test_wrap_method_with_default_retry_and_timeout(unusued_sleep):
+def test_wrap_method_with_default_retry_and_timeout_and_compression(unused_sleep):
method = mock.Mock(
spec=["__call__"], side_effect=[exceptions.InternalServerError(None), 42]
)
default_retry = retry.Retry()
default_timeout = timeout.ConstantTimeout(60)
+ default_compression = grpc.Compression.Gzip
wrapped_method = google.api_core.gapic_v1.method.wrap_method(
- method, default_retry, default_timeout
+ method, default_retry, default_timeout, default_compression
)
result = wrapped_method()
assert result == 42
assert method.call_count == 2
- method.assert_called_with(timeout=60, metadata=mock.ANY)
+ method.assert_called_with(
+ timeout=60, compression=default_compression, metadata=mock.ANY
+ )
@mock.patch("time.sleep")
-def test_wrap_method_with_default_retry_and_timeout_using_sentinel(unusued_sleep):
+def test_wrap_method_with_default_retry_and_timeout_using_sentinel(unused_sleep):
method = mock.Mock(
spec=["__call__"], side_effect=[exceptions.InternalServerError(None), 42]
)
default_retry = retry.Retry()
default_timeout = timeout.ConstantTimeout(60)
+ default_compression = grpc.Compression.Gzip
wrapped_method = google.api_core.gapic_v1.method.wrap_method(
- method, default_retry, default_timeout
+ method, default_retry, default_timeout, default_compression
)
result = wrapped_method(
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
+ compression=google.api_core.gapic_v1.method.DEFAULT,
)
assert result == 42
assert method.call_count == 2
- method.assert_called_with(timeout=60, metadata=mock.ANY)
+ method.assert_called_with(
+ timeout=60, compression=default_compression, metadata=mock.ANY
+ )
@mock.patch("time.sleep")
-def test_wrap_method_with_overriding_retry_and_timeout(unusued_sleep):
- method = mock.Mock(spec=["__call__"], side_effect=[
- exceptions.NotFound(None), 42])
+def test_wrap_method_with_overriding_retry_timeout_compression(unused_sleep):
+ method = mock.Mock(spec=["__call__"], side_effect=[exceptions.NotFound(None), 42])
default_retry = retry.Retry()
default_timeout = timeout.ConstantTimeout(60)
+ default_compression = grpc.Compression.Gzip
wrapped_method = google.api_core.gapic_v1.method.wrap_method(
- method, default_retry, default_timeout
+ method, default_retry, default_timeout, default_compression
)
result = wrapped_method(
retry=retry.Retry(retry.if_exception_type(exceptions.NotFound)),
timeout=timeout.ConstantTimeout(22),
+ compression=grpc.Compression.Deflate,
)
assert result == 42
assert method.call_count == 2
- method.assert_called_with(timeout=22, metadata=mock.ANY)
-
-
-@mock.patch("time.sleep")
-@mock.patch(
- "google.api_core.datetime_helpers.utcnow",
- side_effect=_utcnow_monotonic(),
- autospec=True,
-)
-def test_wrap_method_with_overriding_retry_deadline(utcnow, unused_sleep):
- method = mock.Mock(
- spec=["__call__"],
- side_effect=([exceptions.InternalServerError(None)] * 4) + [42],
- )
- default_retry = retry.Retry()
- default_timeout = timeout.ExponentialTimeout(deadline=60)
- wrapped_method = google.api_core.gapic_v1.method.wrap_method(
- method, default_retry, default_timeout
- )
-
- # Overriding only the retry's deadline should also override the timeout's
- # deadline.
- result = wrapped_method(retry=default_retry.with_deadline(30))
-
- assert result == 42
- timeout_args = [call[1]["timeout"] for call in method.call_args_list]
- assert timeout_args == [5.0, 10.0, 20.0, 26.0, 25.0]
- assert utcnow.call_count == (
- 1
- + 5 # First to set the deadline.
- + 5 # One for each min(timeout, maximum, (DEADLINE - NOW).seconds)
+ method.assert_called_with(
+ timeout=22, compression=grpc.Compression.Deflate, metadata=mock.ANY
)
@@ -236,3 +202,24 @@ def test_wrap_method_with_overriding_timeout_as_a_number():
assert result == 42
method.assert_called_once_with(timeout=22, metadata=mock.ANY)
+
+
+def test_wrap_method_with_call():
+ method = mock.Mock()
+ mock_call = mock.Mock()
+ method.with_call.return_value = 42, mock_call
+
+ wrapped_method = google.api_core.gapic_v1.method.wrap_method(method, with_call=True)
+ result = wrapped_method()
+ assert len(result) == 2
+ assert result[0] == 42
+ assert result[1] == mock_call
+
+
+def test_wrap_method_with_call_not_supported():
+ """Raises an error if wrapped callable doesn't have with_call method."""
+ method = lambda: None # noqa: E731
+
+ with pytest.raises(ValueError) as exc_info:
+ google.api_core.gapic_v1.method.wrap_method(method, with_call=True)
+ assert "with_call=True is only supported for unary calls" in str(exc_info.value)
diff --git a/tests/unit/gapic/test_routing_header.py b/tests/unit/gapic/test_routing_header.py
index 77300e87..2c8c7546 100644
--- a/tests/unit/gapic/test_routing_header.py
+++ b/tests/unit/gapic/test_routing_header.py
@@ -12,6 +12,15 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+from enum import Enum
+
+import pytest
+
+try:
+ import grpc # noqa: F401
+except ImportError:
+ pytest.skip("No GRPC", allow_module_level=True)
+
from google.api_core.gapic_v1 import routing_header
@@ -28,7 +37,67 @@ def test_to_routing_header_with_slashes():
assert value == "name=me/ep&book.read=1%262"
+def test_enum_fully_qualified():
+ class Message:
+ class Color(Enum):
+ RED = 1
+ GREEN = 2
+ BLUE = 3
+
+ params = [("color", Message.Color.RED)]
+ value = routing_header.to_routing_header(params)
+ assert value == "color=Color.RED"
+ value = routing_header.to_routing_header(params, qualified_enums=True)
+ assert value == "color=Color.RED"
+
+
+def test_enum_nonqualified():
+ class Message:
+ class Color(Enum):
+ RED = 1
+ GREEN = 2
+ BLUE = 3
+
+ params = [("color", Message.Color.RED), ("num", 5)]
+ value = routing_header.to_routing_header(params, qualified_enums=False)
+ assert value == "color=RED&num=5"
+ params = {"color": Message.Color.RED, "num": 5}
+ value = routing_header.to_routing_header(params, qualified_enums=False)
+ assert value == "color=RED&num=5"
+
+
def test_to_grpc_metadata():
params = [("name", "meep"), ("book.read", "1")]
metadata = routing_header.to_grpc_metadata(params)
assert metadata == (routing_header.ROUTING_METADATA_KEY, "name=meep&book.read=1")
+
+
+@pytest.mark.parametrize(
+ "key,value,expected",
+ [
+ ("book.read", "1", "book.read=1"),
+ ("name", "me/ep", "name=me/ep"),
+ ("\\", "=", "%5C=%3D"),
+ (b"hello", "world", "hello=world"),
+ ("✔️", "✌️", "%E2%9C%94%EF%B8%8F=%E2%9C%8C%EF%B8%8F"),
+ ],
+)
+def test__urlencode_param(key, value, expected):
+ result = routing_header._urlencode_param(key, value)
+ assert result == expected
+
+
+def test__urlencode_param_caching_performance():
+ import time
+
+ key = "key" * 100
+ value = "value" * 100
+ # time with empty cache
+ start_time = time.perf_counter()
+ routing_header._urlencode_param(key, value)
+ duration = time.perf_counter() - start_time
+ second_start_time = time.perf_counter()
+ routing_header._urlencode_param(key, value)
+ second_duration = time.perf_counter() - second_start_time
+ # The second call hits the cache and should be at least ~10x faster.
+ assert second_duration < duration / 10
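
The caching-performance test above implies `_urlencode_param` memoizes its
(key, value) encoding. One plausible shape, assuming functools.lru_cache and
percent-encoding that keeps "/" in values (the parametrized cases above all hold
under it; the actual mechanism is not shown in this diff):

    import functools
    from urllib.parse import quote

    @functools.lru_cache(maxsize=None)
    def urlencode_param(key, value):
        """Illustrative cached encoder for a single routing-header parameter."""
        if isinstance(key, bytes):
            key = key.decode("utf-8")
        return "{}={}".format(quote(str(key), safe=""), quote(str(value), safe="/"))

    # urlencode_param("\\", "=") -> "%5C=%3D"; urlencode_param("name", "me/ep")
    # -> "name=me/ep"; repeated calls with the same arguments hit the cache.
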
diff --git a/tests/unit/operations_v1/test_operations_client.py b/tests/unit/operations_v1/test_operations_client.py
index cc574612..fb4b14f1 100644
--- a/tests/unit/operations_v1/test_operations_client.py
+++ b/tests/unit/operations_v1/test_operations_client.py
@@ -12,9 +12,17 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import pytest
+
+try:
+ import grpc # noqa: F401
+except ImportError: # pragma: NO COVER
+ pytest.skip("No GRPC", allow_module_level=True)
+
from google.api_core import grpc_helpers
from google.api_core import operations_v1
from google.api_core import page_iterator
+from google.api_core.operations_v1 import operations_client_config
from google.longrunning import operations_pb2
from google.protobuf import empty_pb2
@@ -24,8 +32,12 @@ def test_get_operation():
client = operations_v1.OperationsClient(channel)
channel.GetOperation.response = operations_pb2.Operation(name="meep")
- response = client.get_operation("name")
+ response = client.get_operation("name", metadata=[("header", "foo")])
+ assert ("header", "foo") in channel.GetOperation.calls[0].metadata
+ assert ("x-goog-request-params", "name=name") in channel.GetOperation.calls[
+ 0
+ ].metadata
assert len(channel.GetOperation.requests) == 1
assert channel.GetOperation.requests[0].name == "name"
assert response == channel.GetOperation.response
@@ -41,11 +53,15 @@ def test_list_operations():
list_response = operations_pb2.ListOperationsResponse(operations=operations)
channel.ListOperations.response = list_response
- response = client.list_operations("name", "filter")
+ response = client.list_operations("name", "filter", metadata=[("header", "foo")])
assert isinstance(response, page_iterator.Iterator)
assert list(response) == operations
+ assert ("header", "foo") in channel.ListOperations.calls[0].metadata
+ assert ("x-goog-request-params", "name=name") in channel.ListOperations.calls[
+ 0
+ ].metadata
assert len(channel.ListOperations.requests) == 1
request = channel.ListOperations.requests[0]
assert isinstance(request, operations_pb2.ListOperationsRequest)
@@ -58,8 +74,12 @@ def test_delete_operation():
client = operations_v1.OperationsClient(channel)
channel.DeleteOperation.response = empty_pb2.Empty()
- client.delete_operation("name")
+ client.delete_operation("name", metadata=[("header", "foo")])
+ assert ("header", "foo") in channel.DeleteOperation.calls[0].metadata
+ assert ("x-goog-request-params", "name=name") in channel.DeleteOperation.calls[
+ 0
+ ].metadata
assert len(channel.DeleteOperation.requests) == 1
assert channel.DeleteOperation.requests[0].name == "name"
@@ -69,7 +89,15 @@ def test_cancel_operation():
client = operations_v1.OperationsClient(channel)
channel.CancelOperation.response = empty_pb2.Empty()
- client.cancel_operation("name")
+ client.cancel_operation("name", metadata=[("header", "foo")])
+ assert ("header", "foo") in channel.CancelOperation.calls[0].metadata
+ assert ("x-goog-request-params", "name=name") in channel.CancelOperation.calls[
+ 0
+ ].metadata
assert len(channel.CancelOperation.requests) == 1
assert channel.CancelOperation.requests[0].name == "name"
+
+
+def test_operations_client_config():
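+ # The static gRPC client config shipped with operations_v1 must declare
+ # its service interfaces.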
+ assert operations_client_config.config["interfaces"]
diff --git a/tests/unit/operations_v1/test_operations_rest_client.py b/tests/unit/operations_v1/test_operations_rest_client.py
new file mode 100644
index 00000000..d1f6e0eb
--- /dev/null
+++ b/tests/unit/operations_v1/test_operations_rest_client.py
@@ -0,0 +1,1401 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import os
+
+try:
+ from unittest import mock
+ from unittest.mock import AsyncMock # pragma: NO COVER # noqa: F401
+except ImportError: # pragma: NO COVER
+ import mock # type: ignore
+
+import pytest
+from typing import Any, List
+
+try:
+ import grpc # noqa: F401
+except ImportError: # pragma: NO COVER
+ pytest.skip("No GRPC", allow_module_level=True)
+from requests import Response # noqa: I201
+from google.auth.transport.requests import AuthorizedSession
+
+from google.api_core import client_options
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core.operations_v1 import AbstractOperationsClient
+
+import google.auth
+from google.api_core.operations_v1 import pagers
+from google.api_core.operations_v1 import pagers_async
+from google.api_core.operations_v1 import transports
+from google.auth import credentials as ga_credentials
+from google.auth.exceptions import MutualTLSChannelError
+from google.longrunning import operations_pb2
+from google.oauth2 import service_account
+from google.protobuf import json_format # type: ignore
+from google.rpc import status_pb2 # type: ignore
+
+try:
+ import aiohttp # noqa: F401
+ import google.auth.aio.transport
+ from google.auth.aio.transport.sessions import AsyncAuthorizedSession
+ from google.api_core.operations_v1 import AsyncOperationsRestClient
+ from google.auth.aio import credentials as ga_credentials_async
+
+ GOOGLE_AUTH_AIO_INSTALLED = True
+except ImportError:
+ GOOGLE_AUTH_AIO_INSTALLED = False
+
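+# Custom REST bindings for each longrunning RPC; these are passed as
+# http_options to the transports under test.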
+HTTP_OPTIONS = {
+ "google.longrunning.Operations.CancelOperation": [
+ {"method": "post", "uri": "/v3/{name=operations/*}:cancel", "body": "*"},
+ ],
+ "google.longrunning.Operations.DeleteOperation": [
+ {"method": "delete", "uri": "/v3/{name=operations/*}"},
+ ],
+ "google.longrunning.Operations.GetOperation": [
+ {"method": "get", "uri": "/v3/{name=operations/*}"},
+ ],
+ "google.longrunning.Operations.ListOperations": [
+ {"method": "get", "uri": "/v3/{name=operations}"},
+ ],
+}
+
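+# Parametrization lists for the tests below. The sync client/transport pairs
+# are always present; async pairs are appended only when google-auth aio
+# support is installed.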
+PYPARAM_CLIENT: List[Any] = [
+ AbstractOperationsClient,
+]
+PYPARAM_CLIENT_TRANSPORT_NAME = [
+ [AbstractOperationsClient, transports.OperationsRestTransport, "rest"],
+]
+PYPARAM_CLIENT_TRANSPORT_CREDENTIALS = [
+ [
+ AbstractOperationsClient,
+ transports.OperationsRestTransport,
+ ga_credentials.AnonymousCredentials(),
+ ],
+]
+
+if GOOGLE_AUTH_AIO_INSTALLED:
+ PYPARAM_CLIENT.append(AsyncOperationsRestClient)
+ PYPARAM_CLIENT_TRANSPORT_NAME.append(
+ [
+ AsyncOperationsRestClient,
+ transports.AsyncOperationsRestTransport,
+ "rest_asyncio",
+ ]
+ )
+ PYPARAM_CLIENT_TRANSPORT_CREDENTIALS.append(
+ [
+ AsyncOperationsRestClient,
+ transports.AsyncOperationsRestTransport,
+ ga_credentials_async.AnonymousCredentials(),
+ ]
+ )
+
+
+def client_cert_source_callback():
+ return b"cert bytes", b"key bytes"
+
+
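+# Return the session class wrapped by the transport so tests can patch its
+# `request` method for both the sync and async clients.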
+def _get_session_type(is_async: bool):
+ return (
+ AsyncAuthorizedSession
+ if is_async and GOOGLE_AUTH_AIO_INSTALLED
+ else AuthorizedSession
+ )
+
+
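+# Build a sync or async operations client backed by anonymous credentials
+# and the custom HTTP bindings above.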
+def _get_operations_client(is_async: bool, http_options=HTTP_OPTIONS):
+ if is_async and GOOGLE_AUTH_AIO_INSTALLED:
+ async_transport = transports.rest_asyncio.AsyncOperationsRestTransport(
+ credentials=ga_credentials_async.AnonymousCredentials(),
+ http_options=http_options,
+ )
+ return AsyncOperationsRestClient(transport=async_transport)
+ else:
+ sync_transport = transports.rest.OperationsRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(), http_options=http_options
+ )
+ return AbstractOperationsClient(transport=sync_transport)
+
+
+# If the default endpoint is localhost, then the default mtls endpoint will be
+# the same. This method modifies the default endpoint so that the client can
+# produce a different mtls endpoint for endpoint testing purposes.
+def modify_default_endpoint(client):
+ return (
+ "foo.googleapis.com"
+ if ("localhost" in client.DEFAULT_ENDPOINT)
+ else client.DEFAULT_ENDPOINT
+ )
+
+
+# TODO: Add support for mtls in async REST
+@pytest.mark.parametrize(
+ "client_class",
+ [
+ AbstractOperationsClient,
+ ],
+)
+def test__get_default_mtls_endpoint(client_class):
+ api_endpoint = "example.googleapis.com"
+ api_mtls_endpoint = "example.mtls.googleapis.com"
+ sandbox_endpoint = "example.sandbox.googleapis.com"
+ sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
+ non_googleapi = "api.example.com"
+
+ assert client_class._get_default_mtls_endpoint(None) is None
+ assert client_class._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint
+ assert (
+ client_class._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint
+ )
+ assert (
+ client_class._get_default_mtls_endpoint(sandbox_endpoint)
+ == sandbox_mtls_endpoint
+ )
+ assert (
+ client_class._get_default_mtls_endpoint(sandbox_mtls_endpoint)
+ == sandbox_mtls_endpoint
+ )
+ assert client_class._get_default_mtls_endpoint(non_googleapi) == non_googleapi
+
+
+@pytest.mark.parametrize(
+ "client_class",
+ PYPARAM_CLIENT,
+)
+def test_operations_client_from_service_account_info(client_class):
+ creds = ga_credentials.AnonymousCredentials()
+ if "async" in str(client_class):
+ # TODO(): Add support for service account info to async REST transport.
+ with pytest.raises(NotImplementedError):
+ info = {"valid": True}
+ client_class.from_service_account_info(info)
+ else:
+ with mock.patch.object(
+ service_account.Credentials, "from_service_account_info"
+ ) as factory:
+ factory.return_value = creds
+ info = {"valid": True}
+ client = client_class.from_service_account_info(info)
+ assert client.transport._credentials == creds
+ assert isinstance(client, client_class)
+
+ assert client.transport._host == "https://longrunning.googleapis.com"
+
+
+@pytest.mark.parametrize(
+ "transport_class",
+ [
+ transports.OperationsRestTransport,
+ # TODO(https://github.com/googleapis/python-api-core/issues/706): Add support for
+ # service account credentials in transports.AsyncOperationsRestTransport
+ ],
+)
+def test_operations_client_service_account_always_use_jwt(transport_class):
+ with mock.patch.object(
+ service_account.Credentials, "with_always_use_jwt_access", create=True
+ ) as use_jwt:
+ creds = service_account.Credentials(None, None, None)
+ transport_class(credentials=creds, always_use_jwt_access=True)
+ use_jwt.assert_called_once_with(True)
+
+ with mock.patch.object(
+ service_account.Credentials, "with_always_use_jwt_access", create=True
+ ) as use_jwt:
+ creds = service_account.Credentials(None, None, None)
+ transport_class(credentials=creds, always_use_jwt_access=False)
+ use_jwt.assert_not_called()
+
+
+@pytest.mark.parametrize(
+ "client_class",
+ PYPARAM_CLIENT,
+)
+def test_operations_client_from_service_account_file(client_class):
+ if "async" in str(client_class):
+ # TODO(): Add support for service account creds to async REST transport.
+ with pytest.raises(NotImplementedError):
+ client_class.from_service_account_file("dummy/file/path.json")
+ else:
+ creds = ga_credentials.AnonymousCredentials()
+ with mock.patch.object(
+ service_account.Credentials, "from_service_account_file"
+ ) as factory:
+ factory.return_value = creds
+ client = client_class.from_service_account_file("dummy/file/path.json")
+ assert client.transport._credentials == creds
+ assert isinstance(client, client_class)
+
+ client = client_class.from_service_account_json("dummy/file/path.json")
+ assert client.transport._credentials == creds
+ assert isinstance(client, client_class)
+
+ assert client.transport._host == "https://longrunning.googleapis.com"
+
+
+@pytest.mark.parametrize(
+ "client_class,transport_class,transport_name",
+ PYPARAM_CLIENT_TRANSPORT_NAME,
+)
+def test_operations_client_get_transport_class(
+ client_class, transport_class, transport_name
+):
+ transport = client_class.get_transport_class()
+ available_transports = [
+ transports.OperationsRestTransport,
+ ]
+ if GOOGLE_AUTH_AIO_INSTALLED:
+ available_transports.append(transports.AsyncOperationsRestTransport)
+ assert transport in available_transports
+
+ transport = client_class.get_transport_class(transport_name)
+ assert transport == transport_class
+
+
+# TODO(): Update this test case to include async REST once we have support for MTLS.
+@pytest.mark.parametrize(
+ "client_class,transport_class,transport_name",
+ [(AbstractOperationsClient, transports.OperationsRestTransport, "rest")],
+)
+@mock.patch.object(
+ AbstractOperationsClient,
+ "DEFAULT_ENDPOINT",
+ modify_default_endpoint(AbstractOperationsClient),
+)
+def test_operations_client_client_options(
+ client_class, transport_class, transport_name
+):
+ # # Check that if channel is provided we won't create a new one.
+ # with mock.patch.object(AbstractOperationsBaseClient, "get_transport_class") as gtc:
+ # client = client_class(transport=transport_class())
+ # gtc.assert_not_called()
+
+ # # Check that if channel is provided via str we will create a new one.
+ # with mock.patch.object(AbstractOperationsBaseClient, "get_transport_class") as gtc:
+ # client = client_class(transport=transport_name)
+ # gtc.assert_called()
+
+ # Check the case api_endpoint is provided.
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(client_options=options, transport=transport_name)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host="squid.clam.whelk",
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
+ )
+
+ # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+ # "never".
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(transport=transport_name)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
+ )
+
+ # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+ # "always".
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(transport=transport_name)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_MTLS_ENDPOINT,
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
+ )
+
+ # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
+ # unsupported value.
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+ with pytest.raises(MutualTLSChannelError):
+ client = client_class()
+
+ # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
+ ):
+ with pytest.raises(ValueError):
+ client = client_class()
+
+ # Check the case quota_project_id is provided
+ options = client_options.ClientOptions(quota_project_id="octopus")
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(client_options=options, transport=transport_name)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id="octopus",
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
+ )
+
+
+# TODO: Add support for mtls in async REST
+@pytest.mark.parametrize(
+ "client_class,transport_class,transport_name,use_client_cert_env",
+ [
+ (AbstractOperationsClient, transports.OperationsRestTransport, "rest", "true"),
+ (AbstractOperationsClient, transports.OperationsRestTransport, "rest", "false"),
+ ],
+)
+@mock.patch.object(
+ AbstractOperationsClient,
+ "DEFAULT_ENDPOINT",
+ modify_default_endpoint(AbstractOperationsClient),
+)
+@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
+def test_operations_client_mtls_env_auto(
+ client_class, transport_class, transport_name, use_client_cert_env
+):
+ # This tests the endpoint autoswitch behavior: the endpoint is autoswitched
+ # to the default mTLS endpoint if GOOGLE_API_USE_CLIENT_CERTIFICATE is
+ # "true" and a client cert exists.
+
+ # Check the case client_cert_source is provided. Whether client cert is used depends on
+ # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+ ):
+ options = client_options.ClientOptions(
+ client_cert_source=client_cert_source_callback
+ )
+
+ def fake_init(client_cert_source_for_mtls=None, **kwargs):
+ """Invoke client_cert source if provided."""
+
+ if client_cert_source_for_mtls:
+ client_cert_source_for_mtls()
+ return None
+
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.side_effect = fake_init
+ client = client_class(client_options=options)
+
+ if use_client_cert_env == "false":
+ expected_client_cert_source = None
+ expected_host = client.DEFAULT_ENDPOINT
+ else:
+ expected_client_cert_source = client_cert_source_callback
+ expected_host = client.DEFAULT_MTLS_ENDPOINT
+
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=expected_host,
+ scopes=None,
+ client_cert_source_for_mtls=expected_client_cert_source,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
+ )
+
+ # Check the case ADC client cert is provided. Whether client cert is used depends on
+ # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+ ):
+ with mock.patch.object(transport_class, "__init__") as patched:
+ with mock.patch(
+ "google.auth.transport.mtls.has_default_client_cert_source",
+ return_value=True,
+ ):
+ with mock.patch(
+ "google.auth.transport.mtls.default_client_cert_source",
+ return_value=client_cert_source_callback,
+ ):
+ if use_client_cert_env == "false":
+ expected_host = client.DEFAULT_ENDPOINT
+ expected_client_cert_source = None
+ else:
+ expected_host = client.DEFAULT_MTLS_ENDPOINT
+ expected_client_cert_source = client_cert_source_callback
+
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=expected_host,
+ scopes=None,
+ client_cert_source_for_mtls=expected_client_cert_source,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
+ )
+
+ # Check the case client_cert_source and ADC client cert are not provided.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+ ):
+ with mock.patch.object(transport_class, "__init__") as patched:
+ with mock.patch(
+ "google.auth.transport.mtls.has_default_client_cert_source",
+ return_value=False,
+ ):
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
+ )
+
+
+@pytest.mark.parametrize(
+ "client_class,transport_class,transport_name",
+ PYPARAM_CLIENT_TRANSPORT_NAME,
+)
+def test_operations_client_client_options_scopes(
+ client_class, transport_class, transport_name
+):
+ # Check the case scopes are provided.
+ options = client_options.ClientOptions(
+ scopes=["1", "2"],
+ )
+ if "async" in str(client_class):
+ # TODO(): Add support for scopes to async REST transport.
+ with pytest.raises(core_exceptions.AsyncRestUnsupportedParameterError):
+ client_class(client_options=options, transport=transport_name)
+ else:
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(client_options=options, transport=transport_name)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=["1", "2"],
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
+ )
+
+
+@pytest.mark.parametrize(
+ "client_class,transport_class,transport_name",
+ PYPARAM_CLIENT_TRANSPORT_NAME,
+)
+def test_operations_client_client_options_credentials_file(
+ client_class, transport_class, transport_name
+):
+ # Check the case credentials file is provided.
+ options = client_options.ClientOptions(credentials_file="credentials.json")
+ if "async" in str(client_class):
+ # TODO(): Add support for credentials file to async REST transport.
+ with pytest.raises(core_exceptions.AsyncRestUnsupportedParameterError):
+ client_class(client_options=options, transport=transport_name)
+ else:
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(client_options=options, transport=transport_name)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file="credentials.json",
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
+ )
+
+
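+# The RPC tests below fake responses at the HTTP session layer and verify
+# the HTTP method and URL that each operations call sends.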
+def test_list_operations_rest():
+ client = _get_operations_client(is_async=False)
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(_get_session_type(is_async=False), "request") as req:
+ # Designate an appropriate value for the returned response.
+ return_value = operations_pb2.ListOperationsResponse(
+ next_page_token="next_page_token_value",
+ )
+
+ # Wrap the value into a proper Response obj
+ response_value = Response()
+ response_value.status_code = 200
+ json_return_value = json_format.MessageToJson(return_value)
+ response_value._content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+ response = client.list_operations(
+ name="operations", filter_="my_filter", page_size=10, page_token="abc"
+ )
+
+ actual_args = req.call_args
+ assert actual_args.args[0] == "GET"
+ assert actual_args.args[1] == "https://longrunning.googleapis.com/v3/operations"
+ assert actual_args.kwargs["params"] == [
+ ("filter", "my_filter"),
+ ("pageSize", 10),
+ ("pageToken", "abc"),
+ ]
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListOperationsPager)
+ assert response.next_page_token == "next_page_token_value"
+
+
+@pytest.mark.asyncio
+async def test_list_operations_rest_async():
+ if not GOOGLE_AUTH_AIO_INSTALLED:
+ pytest.skip("Skipped because google-api-core[async_rest] is not installed")
+
+ client = _get_operations_client(is_async=True)
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(_get_session_type(is_async=True), "request") as req:
+ # Designate an appropriate value for the returned response.
+ return_value = operations_pb2.ListOperationsResponse(
+ next_page_token="next_page_token_value",
+ )
+
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ response_value.status_code = 200
+ json_return_value = json_format.MessageToJson(return_value)
+ response_value.read = mock.AsyncMock(
+ return_value=json_return_value.encode("UTF-8")
+ )
+ req.return_value = response_value
+ response = await client.list_operations(
+ name="operations", filter_="my_filter", page_size=10, page_token="abc"
+ )
+
+ actual_args = req.call_args
+ assert actual_args.args[0] == "GET"
+ assert actual_args.args[1] == "https://longrunning.googleapis.com/v3/operations"
+ assert actual_args.kwargs["params"] == [
+ ("filter", "my_filter"),
+ ("pageSize", 10),
+ ("pageToken", "abc"),
+ ]
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers_async.ListOperationsAsyncPager)
+ assert response.next_page_token == "next_page_token_value"
+
+
+def test_list_operations_rest_failure():
+ client = _get_operations_client(is_async=False, http_options=None)
+
+ with mock.patch.object(_get_session_type(is_async=False), "request") as req:
+ response_value = Response()
+ response_value.status_code = 400
+ mock_request = mock.MagicMock()
+ mock_request.method = "GET"
+ mock_request.url = "https://longrunning.googleapis.com:443/v1/operations"
+ response_value.request = mock_request
+ req.return_value = response_value
+ with pytest.raises(core_exceptions.GoogleAPIError):
+ client.list_operations(name="operations")
+
+
+@pytest.mark.asyncio
+async def test_list_operations_rest_failure_async():
+ if not GOOGLE_AUTH_AIO_INSTALLED:
+ pytest.skip("Skipped because google-api-core[async_rest] is not installed")
+
+ client = _get_operations_client(is_async=True, http_options=None)
+
+ with mock.patch.object(_get_session_type(is_async=True), "request") as req:
+ response_value = mock.Mock()
+ response_value.status_code = 400
+ response_value.read = mock.AsyncMock(return_value=b"{}")
+ mock_request = mock.MagicMock()
+ mock_request.method = "GET"
+ mock_request.url = "https://longrunning.googleapis.com:443/v1/operations"
+ response_value.request = mock_request
+ req.return_value = response_value
+ with pytest.raises(core_exceptions.GoogleAPIError):
+ await client.list_operations(name="operations")
+
+
+def test_list_operations_rest_pager():
+ client = _get_operations_client(is_async=False, http_options=None)
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(_get_session_type(is_async=False), "request") as req:
+ # TODO(kbandes): remove this mock unless there's a good reason for it.
+ # with mock.patch.object(path_template, 'transcode') as transcode:
+ # Set the response as a series of pages
+ response = (
+ operations_pb2.ListOperationsResponse(
+ operations=[
+ operations_pb2.Operation(),
+ operations_pb2.Operation(),
+ operations_pb2.Operation(),
+ ],
+ next_page_token="abc",
+ ),
+ operations_pb2.ListOperationsResponse(
+ operations=[],
+ next_page_token="def",
+ ),
+ operations_pb2.ListOperationsResponse(
+ operations=[operations_pb2.Operation()],
+ next_page_token="ghi",
+ ),
+ operations_pb2.ListOperationsResponse(
+ operations=[operations_pb2.Operation(), operations_pb2.Operation()],
+ ),
+ )
+ # Two responses for two calls
+ response = response + response
+
+ # Wrap the values into proper Response objs
+ response = tuple(json_format.MessageToJson(x) for x in response)
+ return_values = tuple(Response() for i in response)
+ for return_val, response_val in zip(return_values, response):
+ return_val._content = response_val.encode("UTF-8")
+ return_val.status_code = 200
+ req.side_effect = return_values
+
+ pager = client.list_operations(name="operations")
+
+ results = list(pager)
+ assert len(results) == 6
+ assert all(isinstance(i, operations_pb2.Operation) for i in results)
+
+ pages = list(client.list_operations(name="operations").pages)
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_operations_rest_pager_async():
+ if not GOOGLE_AUTH_AIO_INSTALLED:
+ pytest.skip("Skipped because google-api-core[async_rest] is not installed")
+ client = _get_operations_client(is_async=True, http_options=None)
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(_get_session_type(is_async=True), "request") as req:
+ # TODO(kbandes): remove this mock unless there's a good reason for it.
+ # with mock.patch.object(path_template, 'transcode') as transcode:
+ # Set the response as a series of pages
+ response = (
+ operations_pb2.ListOperationsResponse(
+ operations=[
+ operations_pb2.Operation(),
+ operations_pb2.Operation(),
+ operations_pb2.Operation(),
+ ],
+ next_page_token="abc",
+ ),
+ operations_pb2.ListOperationsResponse(
+ operations=[],
+ next_page_token="def",
+ ),
+ operations_pb2.ListOperationsResponse(
+ operations=[operations_pb2.Operation()],
+ next_page_token="ghi",
+ ),
+ operations_pb2.ListOperationsResponse(
+ operations=[operations_pb2.Operation(), operations_pb2.Operation()],
+ ),
+ )
+ # Two responses for two calls
+ response = response + response
+
+ # Wrap the values into proper Response objs
+ response = tuple(json_format.MessageToJson(x) for x in response)
+ return_values = tuple(mock.Mock() for i in response)
+ for return_val, response_val in zip(return_values, response):
+ return_val.read = mock.AsyncMock(return_value=response_val.encode("UTF-8"))
+ return_val.status_code = 200
+ req.side_effect = return_values
+
+ pager = await client.list_operations(name="operations")
+
+ responses = []
+ async for response in pager:
+ responses.append(response)
+
+ results = list(responses)
+ assert len(results) == 6
+ assert all(isinstance(i, operations_pb2.Operation) for i in results)
+ pager = await client.list_operations(name="operations")
+
+ responses = []
+ async for response in pager:
+ responses.append(response)
+
+ assert len(responses) == 6
+ assert all(isinstance(i, operations_pb2.Operation) for i in responses)
+
+ pages = []
+
+ async for page in pager.pages:
+ pages.append(page)
+ for page_, token in zip(pages, ["", "", "", "abc", "def", "ghi", ""]):
+ assert page_.next_page_token == token
+
+
+def test_get_operation_rest():
+ client = _get_operations_client(is_async=False)
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(_get_session_type(is_async=False), "request") as req:
+ # Designate an appropriate value for the returned response.
+ return_value = operations_pb2.Operation(
+ name="operations/sample1",
+ done=True,
+ error=status_pb2.Status(code=411),
+ )
+
+ # Wrap the value into a proper Response obj
+ response_value = Response()
+ response_value.status_code = 200
+ json_return_value = json_format.MessageToJson(return_value)
+ response_value._content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+ response = client.get_operation("operations/sample1")
+
+ actual_args = req.call_args
+ assert actual_args.args[0] == "GET"
+ assert (
+ actual_args.args[1]
+ == "https://longrunning.googleapis.com/v3/operations/sample1"
+ )
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, operations_pb2.Operation)
+ assert response.name == "operations/sample1"
+ assert response.done is True
+
+
+@pytest.mark.asyncio
+async def test_get_operation_rest_async():
+ if not GOOGLE_AUTH_AIO_INSTALLED:
+ pytest.skip("Skipped because google-api-core[async_rest] is not installed")
+ client = _get_operations_client(is_async=True)
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(_get_session_type(is_async=True), "request") as req:
+ # Designate an appropriate value for the returned response.
+ return_value = operations_pb2.Operation(
+ name="operations/sample1",
+ done=True,
+ error=status_pb2.Status(code=411),
+ )
+
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ response_value.status_code = 200
+ json_return_value = json_format.MessageToJson(return_value)
+ response_value.read = mock.AsyncMock(return_value=json_return_value)
+ req.return_value = response_value
+ response = await client.get_operation("operations/sample1")
+
+ actual_args = req.call_args
+ assert actual_args.args[0] == "GET"
+ assert (
+ actual_args.args[1]
+ == "https://longrunning.googleapis.com/v3/operations/sample1"
+ )
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, operations_pb2.Operation)
+ assert response.name == "operations/sample1"
+ assert response.done is True
+
+
+def test_get_operation_rest_failure():
+ client = _get_operations_client(is_async=False, http_options=None)
+
+ with mock.patch.object(_get_session_type(is_async=False), "request") as req:
+ response_value = Response()
+ response_value.status_code = 400
+ mock_request = mock.MagicMock()
+ mock_request.method = "GET"
+ mock_request.url = "https://longrunning.googleapis.com/v1/operations/sample1"
+ response_value.request = mock_request
+ req.return_value = response_value
+ with pytest.raises(core_exceptions.GoogleAPIError):
+ client.get_operation("sample0/operations/sample1")
+
+
+@pytest.mark.asyncio
+async def test_get_operation_rest_failure_async():
+ if not GOOGLE_AUTH_AIO_INSTALLED:
+ pytest.skip("Skipped because google-api-core[async_rest] is not installed")
+ client = _get_operations_client(is_async=True, http_options=None)
+
+ with mock.patch.object(_get_session_type(is_async=True), "request") as req:
+ response_value = mock.Mock()
+ response_value.status_code = 400
+ response_value.read = mock.AsyncMock(return_value=b"{}")
+ mock_request = mock.MagicMock()
+ mock_request.method = "GET"
+ mock_request.url = "https://longrunning.googleapis.com/v1/operations/sample1"
+ response_value.request = mock_request
+ req.return_value = response_value
+ with pytest.raises(core_exceptions.GoogleAPIError):
+ await client.get_operation("sample0/operations/sample1")
+
+
+def test_delete_operation_rest():
+ client = _get_operations_client(is_async=False)
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(_get_session_type(is_async=False), "request") as req:
+ # Wrap the value into a proper Response obj
+ response_value = Response()
+ response_value.status_code = 200
+ json_return_value = ""
+ response_value._content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+ client.delete_operation(name="operations/sample1")
+ assert req.call_count == 1
+ actual_args = req.call_args
+ assert actual_args.args[0] == "DELETE"
+ assert (
+ actual_args.args[1]
+ == "https://longrunning.googleapis.com/v3/operations/sample1"
+ )
+
+
+@pytest.mark.asyncio
+async def test_delete_operation_rest_async():
+ if not GOOGLE_AUTH_AIO_INSTALLED:
+ pytest.skip("Skipped because google-api-core[async_rest] is not installed")
+ client = _get_operations_client(is_async=True)
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(_get_session_type(is_async=True), "request") as req:
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ response_value.status_code = 200
+ json_return_value = ""
+ response_value.read = mock.AsyncMock(
+ return_value=json_return_value.encode("UTF-8")
+ )
+ req.return_value = response_value
+ await client.delete_operation(name="operations/sample1")
+ assert req.call_count == 1
+ actual_args = req.call_args
+ assert actual_args.args[0] == "DELETE"
+ assert (
+ actual_args.args[1]
+ == "https://longrunning.googleapis.com/v3/operations/sample1"
+ )
+
+
+def test_delete_operation_rest_failure():
+ client = _get_operations_client(is_async=False, http_options=None)
+
+ with mock.patch.object(_get_session_type(is_async=False), "request") as req:
+ response_value = Response()
+ response_value.status_code = 400
+ mock_request = mock.MagicMock()
+ mock_request.method = "DELETE"
+ mock_request.url = "https://longrunning.googleapis.com/v1/operations/sample1"
+ response_value.request = mock_request
+ req.return_value = response_value
+ with pytest.raises(core_exceptions.GoogleAPIError):
+ client.delete_operation(name="sample0/operations/sample1")
+
+
+@pytest.mark.asyncio
+async def test_delete_operation_rest_failure_async():
+ if not GOOGLE_AUTH_AIO_INSTALLED:
+ pytest.skip("Skipped because google-api-core[async_rest] is not installed")
+ client = _get_operations_client(is_async=True, http_options=None)
+
+ with mock.patch.object(_get_session_type(is_async=True), "request") as req:
+ response_value = mock.Mock()
+ response_value.status_code = 400
+ response_value.read = mock.AsyncMock(return_value=b"{}")
+ mock_request = mock.MagicMock()
+ mock_request.method = "DELETE"
+ mock_request.url = "https://longrunning.googleapis.com/v1/operations/sample1"
+ response_value.request = mock_request
+ req.return_value = response_value
+ with pytest.raises(core_exceptions.GoogleAPIError):
+ await client.delete_operation(name="sample0/operations/sample1")
+
+
+def test_cancel_operation_rest():
+ client = _get_operations_client(is_async=False)
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(_get_session_type(is_async=False), "request") as req:
+ # Wrap the value into a proper Response obj
+ response_value = Response()
+ response_value.status_code = 200
+ json_return_value = ""
+ response_value._content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+ client.cancel_operation(name="operations/sample1")
+ assert req.call_count == 1
+ actual_args = req.call_args
+ assert actual_args.args[0] == "POST"
+ assert (
+ actual_args.args[1]
+ == "https://longrunning.googleapis.com/v3/operations/sample1:cancel"
+ )
+
+
+@pytest.mark.asyncio
+async def test_cancel_operation_rest_async():
+ if not GOOGLE_AUTH_AIO_INSTALLED:
+ pytest.skip("Skipped because google-api-core[async_rest] is not installed")
+ client = _get_operations_client(is_async=True)
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(_get_session_type(is_async=True), "request") as req:
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ response_value.status_code = 200
+ json_return_value = ""
+ response_value.read = mock.AsyncMock(
+ return_value=json_return_value.encode("UTF-8")
+ )
+ req.return_value = response_value
+ await client.cancel_operation(name="operations/sample1")
+ assert req.call_count == 1
+ actual_args = req.call_args
+ assert actual_args.args[0] == "POST"
+ assert (
+ actual_args.args[1]
+ == "https://longrunning.googleapis.com/v3/operations/sample1:cancel"
+ )
+
+
+def test_cancel_operation_rest_failure():
+ client = _get_operations_client(is_async=False, http_options=None)
+
+ with mock.patch.object(_get_session_type(is_async=False), "request") as req:
+ response_value = Response()
+ response_value.status_code = 400
+ mock_request = mock.MagicMock()
+ mock_request.method = "POST"
+ mock_request.url = (
+ "https://longrunning.googleapis.com/v1/operations/sample1:cancel"
+ )
+ response_value.request = mock_request
+ req.return_value = response_value
+ with pytest.raises(core_exceptions.GoogleAPIError):
+ client.cancel_operation(name="sample0/operations/sample1")
+
+
+@pytest.mark.asyncio
+async def test_cancel_operation_rest_failure_async():
+ if not GOOGLE_AUTH_AIO_INSTALLED:
+ pytest.skip("Skipped because google-api-core[async_rest] is not installed")
+ client = _get_operations_client(is_async=True, http_options=None)
+
+ with mock.patch.object(_get_session_type(is_async=True), "request") as req:
+ response_value = mock.Mock()
+ response_value.status_code = 400
+ response_value.read = mock.AsyncMock(return_value=b"{}")
+ mock_request = mock.MagicMock()
+ mock_request.method = "POST"
+ mock_request.url = (
+ "https://longrunning.googleapis.com/v1/operations/sample1:cancel"
+ )
+ response_value.request = mock_request
+ req.return_value = response_value
+ with pytest.raises(core_exceptions.GoogleAPIError):
+ await client.cancel_operation(name="sample0/operations/sample1")
+
+
+@pytest.mark.parametrize(
+ "client_class,transport_class,credentials",
+ PYPARAM_CLIENT_TRANSPORT_CREDENTIALS,
+)
+def test_credentials_transport_error(client_class, transport_class, credentials):
+ # It is an error to provide credentials and a transport instance.
+ transport = transport_class(credentials=credentials)
+ with pytest.raises(ValueError):
+ client_class(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # It is an error to provide a credentials file and a transport instance.
+ transport = transport_class(credentials=credentials)
+ with pytest.raises(ValueError):
+ client_class(
+ client_options={"credentials_file": "credentials.json"},
+ transport=transport,
+ )
+
+ # It is an error to provide scopes and a transport instance.
+ transport = transport_class(credentials=credentials)
+ with pytest.raises(ValueError):
+ client_class(
+ client_options={"scopes": ["1", "2"]},
+ transport=transport,
+ )
+
+
+@pytest.mark.parametrize(
+ "client_class,transport_class,credentials",
+ PYPARAM_CLIENT_TRANSPORT_CREDENTIALS,
+)
+def test_transport_instance(client_class, transport_class, credentials):
+ # A client may be instantiated with a custom transport instance.
+ transport = transport_class(
+ credentials=credentials,
+ )
+ client = client_class(transport=transport)
+ assert client.transport is transport
+
+
+@pytest.mark.parametrize(
+ "client_class,transport_class,credentials",
+ PYPARAM_CLIENT_TRANSPORT_CREDENTIALS,
+)
+def test_transport_adc(client_class, transport_class, credentials):
+ # Test default credentials are used if not provided.
+ with mock.patch.object(google.auth, "default") as adc:
+ adc.return_value = (credentials, None)
+ transport_class()
+ adc.assert_called_once()
+
+
+def test_operations_base_transport_error():
+ # Passing both a credentials object and credentials_file should raise an error
+ with pytest.raises(core_exceptions.DuplicateCredentialArgs):
+ transports.OperationsTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ credentials_file="credentials.json",
+ )
+
+
+def test_operations_base_transport():
+ # Instantiate the base transport.
+ with mock.patch(
+ "google.api_core.operations_v1.transports.OperationsTransport.__init__"
+ ) as Transport:
+ Transport.return_value = None
+ transport = transports.OperationsTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Every method on the transport should just blindly
+ # raise NotImplementedError.
+ methods = (
+ "list_operations",
+ "get_operation",
+ "delete_operation",
+ "cancel_operation",
+ )
+ for method in methods:
+ with pytest.raises(NotImplementedError):
+ getattr(transport, method)(request=object())
+
+ with pytest.raises(NotImplementedError):
+ transport.close()
+
+
+def test_operations_base_transport_with_credentials_file():
+ # Instantiate the base transport with a credentials file
+ with mock.patch.object(
+ google.auth, "load_credentials_from_file", autospec=True
+ ) as load_creds, mock.patch(
+ "google.api_core.operations_v1.transports.OperationsTransport._prep_wrapped_messages"
+ ) as Transport:
+ Transport.return_value = None
+ load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
+ transports.OperationsTransport(
+ credentials_file="credentials.json",
+ quota_project_id="octopus",
+ )
+ load_creds.assert_called_once_with(
+ "credentials.json",
+ scopes=None,
+ default_scopes=(),
+ quota_project_id="octopus",
+ )
+
+
+def test_operations_base_transport_with_adc():
+ # Test the default credentials are used if credentials and credentials_file are None.
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch(
+ "google.api_core.operations_v1.transports.OperationsTransport._prep_wrapped_messages"
+ ) as Transport:
+ Transport.return_value = None
+ adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+ transports.OperationsTransport()
+ adc.assert_called_once()
+
+
+@pytest.mark.parametrize(
+ "client_class",
+ PYPARAM_CLIENT,
+)
+def test_operations_auth_adc(client_class):
+ # If no credentials are provided, we should use ADC credentials.
+ with mock.patch.object(google.auth, "default", autospec=True) as adc:
+ adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+
+ if "async" in str(client_class).lower():
+ # TODO(): Add support for adc to async REST transport.
+ # NOTE: Ideally, the logic for adc shouldn't be called if transport
+ # is set to async REST. If the user does not configure credentials
+ # of type `google.auth.aio.credentials.Credentials`,
+ # we should raise an exception to avoid the adc workflow.
+ with pytest.raises(google.auth.exceptions.InvalidType):
+ client_class()
+ else:
+ client_class()
+ adc.assert_called_once_with(
+ scopes=None,
+ default_scopes=(),
+ quota_project_id=None,
+ )
+
+
+# TODO(https://github.com/googleapis/python-api-core/issues/705): Add
+# testing for `transports.AsyncOperationsRestTransport` once MTLS is supported
+# in `google.auth.aio.transport`.
+@pytest.mark.parametrize(
+ "transport_class",
+ [
+ transports.OperationsRestTransport,
+ ],
+)
+def test_operations_http_transport_client_cert_source_for_mtls(transport_class):
+ cred = ga_credentials.AnonymousCredentials()
+ with mock.patch(
+ "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"
+ ) as mock_configure_mtls_channel:
+ transport_class(
+ credentials=cred, client_cert_source_for_mtls=client_cert_source_callback
+ )
+ mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
+
+
+@pytest.mark.parametrize(
+ "client_class,transport_class,credentials",
+ PYPARAM_CLIENT_TRANSPORT_CREDENTIALS,
+)
+def test_operations_host_no_port(client_class, transport_class, credentials):
+ client = client_class(
+ credentials=credentials,
+ client_options=client_options.ClientOptions(
+ api_endpoint="longrunning.googleapis.com"
+ ),
+ )
+ assert client.transport._host == "https://longrunning.googleapis.com"
+
+
+@pytest.mark.parametrize(
+ "client_class,transport_class,credentials",
+ PYPARAM_CLIENT_TRANSPORT_CREDENTIALS,
+)
+def test_operations_host_with_port(client_class, transport_class, credentials):
+ client = client_class(
+ credentials=credentials,
+ client_options=client_options.ClientOptions(
+ api_endpoint="longrunning.googleapis.com:8000"
+ ),
+ )
+ assert client.transport._host == "https://longrunning.googleapis.com:8000"
+
+
+@pytest.mark.parametrize(
+ "client_class",
+ PYPARAM_CLIENT,
+)
+def test_common_billing_account_path(client_class):
+ billing_account = "squid"
+ expected = "billingAccounts/{billing_account}".format(
+ billing_account=billing_account,
+ )
+ actual = client_class.common_billing_account_path(billing_account)
+ assert expected == actual
+
+
+@pytest.mark.parametrize(
+ "client_class",
+ PYPARAM_CLIENT,
+)
+def test_parse_common_billing_account_path(client_class):
+ expected = {
+ "billing_account": "clam",
+ }
+ path = client_class.common_billing_account_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = client_class.parse_common_billing_account_path(path)
+ assert expected == actual
+
+
+@pytest.mark.parametrize(
+ "client_class",
+ PYPARAM_CLIENT,
+)
+def test_common_folder_path(client_class):
+ folder = "whelk"
+ expected = "folders/{folder}".format(
+ folder=folder,
+ )
+ actual = client_class.common_folder_path(folder)
+ assert expected == actual
+
+
+@pytest.mark.parametrize(
+ "client_class",
+ PYPARAM_CLIENT,
+)
+def test_parse_common_folder_path(client_class):
+ expected = {
+ "folder": "octopus",
+ }
+ path = client_class.common_folder_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = client_class.parse_common_folder_path(path)
+ assert expected == actual
+
+
+@pytest.mark.parametrize(
+ "client_class",
+ PYPARAM_CLIENT,
+)
+def test_common_organization_path(client_class):
+ organization = "oyster"
+ expected = "organizations/{organization}".format(
+ organization=organization,
+ )
+ actual = client_class.common_organization_path(organization)
+ assert expected == actual
+
+
+@pytest.mark.parametrize(
+ "client_class",
+ PYPARAM_CLIENT,
+)
+def test_parse_common_organization_path(client_class):
+ expected = {
+ "organization": "nudibranch",
+ }
+ path = client_class.common_organization_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = client_class.parse_common_organization_path(path)
+ assert expected == actual
+
+
+@pytest.mark.parametrize(
+ "client_class",
+ PYPARAM_CLIENT,
+)
+def test_common_project_path(client_class):
+ project = "cuttlefish"
+ expected = "projects/{project}".format(
+ project=project,
+ )
+ actual = client_class.common_project_path(project)
+ assert expected == actual
+
+
+@pytest.mark.parametrize(
+ "client_class",
+ PYPARAM_CLIENT,
+)
+def test_parse_common_project_path(client_class):
+ expected = {
+ "project": "mussel",
+ }
+ path = client_class.common_project_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = client_class.parse_common_project_path(path)
+ assert expected == actual
+
+
+@pytest.mark.parametrize(
+ "client_class",
+ PYPARAM_CLIENT,
+)
+def test_common_location_path(client_class):
+ project = "winkle"
+ location = "nautilus"
+ expected = "projects/{project}/locations/{location}".format(
+ project=project,
+ location=location,
+ )
+ actual = client_class.common_location_path(project, location)
+ assert expected == actual
+
+
+@pytest.mark.parametrize(
+ "client_class",
+ PYPARAM_CLIENT,
+)
+def test_parse_common_location_path(client_class):
+ expected = {
+ "project": "scallop",
+ "location": "abalone",
+ }
+ path = client_class.common_location_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = client_class.parse_common_location_path(path)
+ assert expected == actual
+
+
+@pytest.mark.parametrize(
+ "client_class,transport_class,credentials",
+ PYPARAM_CLIENT_TRANSPORT_CREDENTIALS,
+)
+def test_client_withDEFAULT_CLIENT_INFO(client_class, transport_class, credentials):
+ client_info = gapic_v1.client_info.ClientInfo()
+ with mock.patch.object(transport_class, "_prep_wrapped_messages") as prep:
+ client_class(
+ credentials=credentials,
+ client_info=client_info,
+ )
+ prep.assert_called_once_with(client_info)
+
+ with mock.patch.object(transport_class, "_prep_wrapped_messages") as prep:
+ transport_class(
+ credentials=credentials,
+ client_info=client_info,
+ )
+ prep.assert_called_once_with(client_info)
diff --git a/tests/unit/retry/__init__.py b/tests/unit/retry/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/tests/unit/retry/test_retry_base.py b/tests/unit/retry/test_retry_base.py
new file mode 100644
index 00000000..212c4293
--- /dev/null
+++ b/tests/unit/retry/test_retry_base.py
@@ -0,0 +1,293 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import itertools
+import re
+from unittest import mock
+
+import pytest
+import requests.exceptions
+
+from google.api_core import exceptions
+from google.api_core import retry
+from google.auth import exceptions as auth_exceptions
+
+
+def test_if_exception_type():
+ predicate = retry.if_exception_type(ValueError)
+
+ assert predicate(ValueError())
+ assert not predicate(TypeError())
+
+
+def test_if_exception_type_multiple():
+ predicate = retry.if_exception_type(ValueError, TypeError)
+
+ assert predicate(ValueError())
+ assert predicate(TypeError())
+ assert not predicate(RuntimeError())
+
+
+def test_if_transient_error():
+ assert retry.if_transient_error(exceptions.InternalServerError(""))
+ assert retry.if_transient_error(exceptions.TooManyRequests(""))
+ assert retry.if_transient_error(exceptions.ServiceUnavailable(""))
+ assert retry.if_transient_error(requests.exceptions.ConnectionError(""))
+ assert retry.if_transient_error(requests.exceptions.ChunkedEncodingError(""))
+ assert retry.if_transient_error(auth_exceptions.TransportError(""))
+ assert not retry.if_transient_error(exceptions.InvalidArgument(""))
+
+
+# Make uniform return its maximum, which will be the calculated sleep time:
+# the delays double on each attempt and are capped at the 60s maximum.
+@mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n)
+def test_exponential_sleep_generator_base_2(uniform):
+ gen = retry.exponential_sleep_generator(1, 60, multiplier=2)
+
+ result = list(itertools.islice(gen, 8))
+ assert result == [1, 2, 4, 8, 16, 32, 60, 60]
+
+
+def test_build_retry_error_empty_list():
+ """
+ attempt to build a retry error with no errors encountered
+ should return a generic RetryError
+ """
+ from google.api_core.retry import build_retry_error
+ from google.api_core.retry import RetryFailureReason
+
+ reason = RetryFailureReason.NON_RETRYABLE_ERROR
+ src, cause = build_retry_error([], reason, 10)
+ assert isinstance(src, exceptions.RetryError)
+ assert cause is None
+ assert src.message == "Unknown error"
+
+
+def test_build_retry_error_timeout_message():
+ """
+ should provide helpful error message when timeout is reached
+ """
+ from google.api_core.retry import build_retry_error
+ from google.api_core.retry import RetryFailureReason
+
+ reason = RetryFailureReason.TIMEOUT
+ cause = RuntimeError("timeout")
+ src, found_cause = build_retry_error([ValueError(), cause], reason, 10)
+ assert isinstance(src, exceptions.RetryError)
+ assert src.message == "Timeout of 10.0s exceeded"
+ # should attach appropriate cause
+ assert found_cause is cause
+
+
+def test_build_retry_error_empty_timeout():
+ """
+ attempt to build a retry error when timeout is None
+ should return a generic timeout error message
+ """
+ from google.api_core.retry import build_retry_error
+ from google.api_core.retry import RetryFailureReason
+
+ reason = RetryFailureReason.TIMEOUT
+ src, _ = build_retry_error([], reason, None)
+ assert isinstance(src, exceptions.RetryError)
+ assert src.message == "Timeout exceeded"
+
+
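+# _BaseRetry is the shared base class of the sync and async retry decorators;
+# these tests cover its constructor and the with_* helpers, which return
+# modified copies.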
+class Test_BaseRetry(object):
+ def _make_one(self, *args, **kwargs):
+ return retry.retry_base._BaseRetry(*args, **kwargs)
+
+ def test_constructor_defaults(self):
+ retry_ = self._make_one()
+ assert retry_._predicate == retry.if_transient_error
+ assert retry_._initial == 1
+ assert retry_._maximum == 60
+ assert retry_._multiplier == 2
+ assert retry_._timeout == 120
+ assert retry_._on_error is None
+ assert retry_.timeout == 120
+ assert retry_.deadline == 120
+
+ def test_constructor_options(self):
+ _some_function = mock.Mock()
+
+ retry_ = self._make_one(
+ predicate=mock.sentinel.predicate,
+ initial=1,
+ maximum=2,
+ multiplier=3,
+ timeout=4,
+ on_error=_some_function,
+ )
+ assert retry_._predicate == mock.sentinel.predicate
+ assert retry_._initial == 1
+ assert retry_._maximum == 2
+ assert retry_._multiplier == 3
+ assert retry_._timeout == 4
+ assert retry_._on_error is _some_function
+
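+ # with_deadline is a deprecated alias of with_timeout; both spellings are
+ # exercised here and must behave identically.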
+ @pytest.mark.parametrize("use_deadline", [True, False])
+ @pytest.mark.parametrize("value", [None, 0, 1, 4, 42, 5.5])
+ def test_with_timeout(self, use_deadline, value):
+ retry_ = self._make_one(
+ predicate=mock.sentinel.predicate,
+ initial=1,
+ maximum=2,
+ multiplier=3,
+ timeout=4,
+ on_error=mock.sentinel.on_error,
+ )
+ new_retry = (
+ retry_.with_timeout(value)
+ if not use_deadline
+ else retry_.with_deadline(value)
+ )
+ assert retry_ is not new_retry
+ assert new_retry._timeout == value
+ assert (
+ new_retry.timeout == value
+ if not use_deadline
+ else new_retry.deadline == value
+ )
+
+ # the rest of the attributes should remain the same
+ assert new_retry._predicate is retry_._predicate
+ assert new_retry._initial == retry_._initial
+ assert new_retry._maximum == retry_._maximum
+ assert new_retry._multiplier == retry_._multiplier
+ assert new_retry._on_error is retry_._on_error
+
+ def test_with_predicate(self):
+ retry_ = self._make_one(
+ predicate=mock.sentinel.predicate,
+ initial=1,
+ maximum=2,
+ multiplier=3,
+ timeout=4,
+ on_error=mock.sentinel.on_error,
+ )
+ new_retry = retry_.with_predicate(mock.sentinel.predicate)
+ assert retry_ is not new_retry
+ assert new_retry._predicate == mock.sentinel.predicate
+
+ # the rest of the attributes should remain the same
+ assert new_retry._timeout == retry_._timeout
+ assert new_retry._initial == retry_._initial
+ assert new_retry._maximum == retry_._maximum
+ assert new_retry._multiplier == retry_._multiplier
+ assert new_retry._on_error is retry_._on_error
+
+ def test_with_delay_noop(self):
+ retry_ = self._make_one(
+ predicate=mock.sentinel.predicate,
+ initial=1,
+ maximum=2,
+ multiplier=3,
+ timeout=4,
+ on_error=mock.sentinel.on_error,
+ )
+ new_retry = retry_.with_delay()
+ assert retry_ is not new_retry
+ assert new_retry._initial == retry_._initial
+ assert new_retry._maximum == retry_._maximum
+ assert new_retry._multiplier == retry_._multiplier
+
+ @pytest.mark.parametrize(
+ "originals,updated,expected",
+ [
+ [(1, 2, 3), (4, 5, 6), (4, 5, 6)],
+ [(1, 2, 3), (0, 0, 0), (0, 0, 0)],
+ [(1, 2, 3), (None, None, None), (1, 2, 3)],
+ [(0, 0, 0), (None, None, None), (0, 0, 0)],
+ [(1, 2, 3), (None, 0.5, None), (1, 0.5, 3)],
+ [(1, 2, 3), (None, 0.5, 4), (1, 0.5, 4)],
+ [(1, 2, 3), (9, None, None), (9, 2, 3)],
+ ],
+ )
+ def test_with_delay(self, originals, updated, expected):
+ retry_ = self._make_one(
+ predicate=mock.sentinel.predicate,
+ initial=originals[0],
+ maximum=originals[1],
+ multiplier=originals[2],
+ timeout=14,
+ on_error=mock.sentinel.on_error,
+ )
+ new_retry = retry_.with_delay(
+ initial=updated[0], maximum=updated[1], multiplier=updated[2]
+ )
+ assert retry_ is not new_retry
+ assert new_retry._initial == expected[0]
+ assert new_retry._maximum == expected[1]
+ assert new_retry._multiplier == expected[2]
+
+ # the rest of the attributes should remain the same
+ assert new_retry._timeout == retry_._timeout
+ assert new_retry._predicate is retry_._predicate
+ assert new_retry._on_error is retry_._on_error
+
+ def test_with_delay_partial_options(self):
+ retry_ = self._make_one(
+ predicate=mock.sentinel.predicate,
+ initial=1,
+ maximum=2,
+ multiplier=3,
+ timeout=4,
+ on_error=mock.sentinel.on_error,
+ )
+ new_retry = retry_.with_delay(initial=4)
+ assert retry_ is not new_retry
+ assert new_retry._initial == 4
+ assert new_retry._maximum == 2
+ assert new_retry._multiplier == 3
+
+ new_retry = retry_.with_delay(maximum=4)
+ assert retry_ is not new_retry
+ assert new_retry._initial == 1
+ assert new_retry._maximum == 4
+ assert new_retry._multiplier == 3
+
+ new_retry = retry_.with_delay(multiplier=4)
+ assert retry_ is not new_retry
+ assert new_retry._initial == 1
+ assert new_retry._maximum == 2
+ assert new_retry._multiplier == 4
+
+ # the rest of the attributes should remain the same
+ assert new_retry._timeout == retry_._timeout
+ assert new_retry._predicate is retry_._predicate
+ assert new_retry._on_error is retry_._on_error
+
+ def test___str__(self):
+ def if_exception_type(exc):
+ return bool(exc) # pragma: NO COVER
+
+ # Explicitly set all attributes, so that changes to the Retry defaults
+ # do not cause this test to start failing.
+ retry_ = self._make_one(
+ predicate=if_exception_type,
+ initial=1.0,
+ maximum=60.0,
+ multiplier=2.0,
+ timeout=120.0,
+ on_error=None,
+ )
+ assert re.match(
+ (
+ r"<_BaseRetry predicate=, "
+ r"initial=1.0, maximum=60.0, multiplier=2.0, timeout=120.0, "
+ r"on_error=None>"
+ ),
+ str(retry_),
+ )
diff --git a/tests/unit/retry/test_retry_imports.py b/tests/unit/retry/test_retry_imports.py
new file mode 100644
index 00000000..597909fc
--- /dev/null
+++ b/tests/unit/retry/test_retry_imports.py
@@ -0,0 +1,33 @@
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+def test_legacy_imports_retry_unary_sync():
+ # TODO: Delete this test when we revert these imports on the
+ # next major version release
+ # (https://github.com/googleapis/python-api-core/issues/576)
+ from google.api_core.retry import datetime_helpers # noqa: F401
+ from google.api_core.retry import exceptions # noqa: F401
+ from google.api_core.retry import auth_exceptions # noqa: F401
+
+
+def test_legacy_imports_retry_unary_async():
+ # TODO: Delete this test when we revert these imports on the
+ # next major version release
+ # (https://github.com/googleapis/python-api-core/issues/576)
+ from google.api_core import retry_async # noqa: F401
+
+ # See https://github.com/googleapis/python-api-core/issues/586
+ # for context on why we need to test this import explicitly.
+ from google.api_core.retry_async import AsyncRetry # noqa: F401
diff --git a/tests/unit/retry/test_retry_streaming.py b/tests/unit/retry/test_retry_streaming.py
new file mode 100644
index 00000000..2499b2ae
--- /dev/null
+++ b/tests/unit/retry/test_retry_streaming.py
@@ -0,0 +1,505 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import re
+
+try:
+ from unittest import mock
+ from unittest.mock import AsyncMock # pragma: NO COVER # noqa: F401
+except ImportError: # pragma: NO COVER
+ import mock # type: ignore
+
+import pytest
+
+from google.api_core import exceptions
+from google.api_core import retry
+from google.api_core.retry import retry_streaming
+
+from .test_retry_base import Test_BaseRetry
+
+
+def test_retry_streaming_target_bad_sleep_generator():
+ with pytest.raises(
+ ValueError, match="Sleep generator stopped yielding sleep values"
+ ):
+ next(retry_streaming.retry_target_stream(None, lambda x: True, [], None))
+
+
+@mock.patch("time.sleep", autospec=True)
+def test_retry_streaming_target_dynamic_backoff(sleep):
+ """
+ sleep_generator should be iterated after on_error, to support dynamic backoff
+ """
+ from functools import partial
+
+ sleep.side_effect = RuntimeError("stop after sleep")
+ # start with empty sleep generator; values are added after exception in push_sleep_value
+ sleep_values = []
+ error_target = partial(TestStreamingRetry._generator_mock, error_on=0)
+ inserted_sleep = 99
+
+ def push_sleep_value(err):
+ sleep_values.append(inserted_sleep)
+
+ with pytest.raises(RuntimeError):
+ next(
+ retry_streaming.retry_target_stream(
+ error_target,
+ predicate=lambda x: True,
+ sleep_generator=sleep_values,
+ on_error=push_sleep_value,
+ )
+ )
+ assert sleep.call_count == 1
+ sleep.assert_called_once_with(inserted_sleep)
+
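+# Comment-only sketch of the pattern exercised above (illustrative; not part
+# of the library API): because the sleep generator is advanced only after
+# ``on_error`` runs, the callback may pick the next delay after inspecting
+# the error:
+#
+#     delays = []
+#
+#     def choose_delay(exc):  # hypothetical callback
+#         delays.append(30 if isinstance(exc, ValueError) else 1)
+#
+#     stream = retry_streaming.retry_target_stream(
+#         target, lambda _: True, delays, on_error=choose_delay
+#     )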
+
+class TestStreamingRetry(Test_BaseRetry):
+ def _make_one(self, *args, **kwargs):
+ return retry_streaming.StreamingRetry(*args, **kwargs)
+
+ def test___str__(self):
+ def if_exception_type(exc):
+ return bool(exc) # pragma: NO COVER
+
+ # Explicitly set all attributes, so that changes to the Retry defaults
+ # do not cause this test to start failing.
+ retry_ = retry_streaming.StreamingRetry(
+ predicate=if_exception_type,
+ initial=1.0,
+ maximum=60.0,
+ multiplier=2.0,
+ timeout=120.0,
+ on_error=None,
+ )
+ assert re.match(
+ (
+ r", "
+ r"initial=1.0, maximum=60.0, multiplier=2.0, timeout=120.0, "
+ r"on_error=None>"
+ ),
+ str(retry_),
+ )
+
+ @staticmethod
+ def _generator_mock(
+ num=5,
+ error_on=None,
+ return_val=None,
+ exceptions_seen=None,
+ ):
+ """
+ Helper to create a mock generator that yields a number of values
+ Generator can optionally raise an exception on a specific iteration
+
+ Args:
+ - num (int): the number of values to yield. After this, the generator will return `return_val`
+ - error_on (int): if given, the generator will raise a ValueError on the specified iteration
+ - return_val (any): if given, the generator will return this value after yielding num values
+ - exceptions_seen (list): if given, the generator will append any exceptions to this list before raising
+ """
+ try:
+ for i in range(num):
+ if error_on is not None and i == error_on:
+ raise ValueError("generator mock error")
+ yield i
+ return return_val
+ except (Exception, BaseException, GeneratorExit) as e:
+ # keep track of exceptions seen by generator
+ if exceptions_seen is not None:
+ exceptions_seen.append(e)
+ raise
+
+ @mock.patch("time.sleep", autospec=True)
+ def test___call___success(self, sleep):
+ """
+ Test that a retry-decorated generator yields values as expected
+ This test checks a generator with no issues
+ """
+ import types
+ import collections
+
+ retry_ = retry_streaming.StreamingRetry()
+
+ decorated = retry_(self._generator_mock)
+
+ num = 10
+ result = decorated(num)
+ # check types
+ assert isinstance(decorated(num), collections.abc.Iterable)
+ assert isinstance(decorated(num), types.GeneratorType)
+ assert isinstance(self._generator_mock(num), collections.abc.Iterable)
+ assert isinstance(self._generator_mock(num), types.GeneratorType)
+ # check yield contents
+ unpacked = [i for i in result]
+ assert len(unpacked) == num
+ for a, b in zip(unpacked, self._generator_mock(num)):
+ assert a == b
+ sleep.assert_not_called()
+
+ @mock.patch("time.sleep", autospec=True)
+ def test___call___retry(self, sleep):
+ """
+ Tests that a retry-decorated generator will retry on errors
+ """
+ on_error = mock.Mock(return_value=None)
+ retry_ = retry_streaming.StreamingRetry(
+ on_error=on_error,
+ predicate=retry.if_exception_type(ValueError),
+ timeout=None,
+ )
+ result = retry_(self._generator_mock)(error_on=3)
+ # error thrown on 3
+ # generator should contain 0, 1, 2 looping
+ unpacked = [next(result) for i in range(10)]
+ assert unpacked == [0, 1, 2, 0, 1, 2, 0, 1, 2, 0]
+ assert on_error.call_count == 3
+
+ @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n)
+ @mock.patch("time.sleep", autospec=True)
+ @pytest.mark.parametrize("use_deadline_arg", [True, False])
+ def test___call___retry_hitting_timeout(self, sleep, uniform, use_deadline_arg):
+ """
+ Tests that a retry-decorated generator will throw a RetryError
+ after using the time budget
+ """
+ import time
+
+ timeout_val = 30.9
+ # support "deadline" as an alias for "timeout"
+ timeout_kwarg = (
+ {"timeout": timeout_val}
+ if not use_deadline_arg
+ else {"deadline": timeout_val}
+ )
+
+ on_error = mock.Mock(return_value=None)
+ retry_ = retry_streaming.StreamingRetry(
+ predicate=retry.if_exception_type(ValueError),
+ initial=1.0,
+ maximum=1024.0,
+ multiplier=2.0,
+ **timeout_kwarg,
+ )
+
+ timenow = time.monotonic()
+ now_patcher = mock.patch(
+ "time.monotonic",
+ return_value=timenow,
+ )
+
+ decorated = retry_(self._generator_mock, on_error=on_error)
+ generator = decorated(error_on=1)
+ with now_patcher as patched_now:
+ # Make sure that calls to fake time.sleep() also advance the mocked
+ # time clock.
+ def increase_time(sleep_delay):
+ patched_now.return_value += sleep_delay
+
+ sleep.side_effect = increase_time
+ with pytest.raises(exceptions.RetryError):
+ [i for i in generator]
+
+ assert on_error.call_count == 5
+ # check the delays
+ assert sleep.call_count == 4  # one sleep between each pair of successive target calls
+ last_wait = sleep.call_args.args[0]
+ total_wait = sum(call_args.args[0] for call_args in sleep.call_args_list)
+ assert last_wait == 8.0
+ assert total_wait == 15.0
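+ # With uniform() patched to return its upper bound and multiplier=2, the
+ # slept delays are 1 + 2 + 4 + 8 = 15s; the next delay (16s) would end at
+ # 31s, past the 30.9s timeout, so it is never slept.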
+
+ @mock.patch("time.sleep", autospec=True)
+ def test___call___with_generator_send(self, sleep):
+ """
+ Send should be passed through retry into target generator
+ """
+
+ def _mock_send_gen():
+ """
+ always yield whatever was sent in
+ """
+ in_ = yield
+ while True:
+ in_ = yield in_
+
+ retry_ = retry_streaming.StreamingRetry()
+
+ decorated = retry_(_mock_send_gen)
+
+ generator = decorated()
+ result = next(generator)
+ # first yield should be None
+ assert result is None
+ in_messages = ["test_1", "hello", "world"]
+ out_messages = []
+ for msg in in_messages:
+ recv = generator.send(msg)
+ out_messages.append(recv)
+ assert in_messages == out_messages
+
+ @mock.patch("time.sleep", autospec=True)
+ def test___call___with_generator_send_retry(self, sleep):
+ """
+ Send should support retries like next
+ """
+ on_error = mock.Mock(return_value=None)
+ retry_ = retry_streaming.StreamingRetry(
+ on_error=on_error,
+ predicate=retry.if_exception_type(ValueError),
+ timeout=None,
+ )
+ result = retry_(self._generator_mock)(error_on=3)
+ with pytest.raises(TypeError) as exc_info:
+ # calling first send with non-None input should raise a TypeError
+ result.send("can not send to fresh generator")
+ assert exc_info.match("can't send non-None value")
+ # initiate iteration with None
+ result = retry_(self._generator_mock)(error_on=3)
+ assert result.send(None) == 0
+ # error thrown on 3
+ # generator should contain 0, 1, 2 looping
+ unpacked = [result.send(i) for i in range(10)]
+ assert unpacked == [1, 2, 0, 1, 2, 0, 1, 2, 0, 1]
+ assert on_error.call_count == 3
+
+ @mock.patch("time.sleep", autospec=True)
+ def test___call___with_iterable_send(self, sleep):
+ """
+ send should raise attribute error if wrapped iterator does not support it
+ """
+ retry_ = retry_streaming.StreamingRetry()
+
+ def iterable_fn(n):
+ return iter(range(n))
+
+ decorated = retry_(iterable_fn)
+ generator = decorated(5)
+ # initialize
+ next(generator)
+ # call send
+ with pytest.raises(AttributeError):
+ generator.send("test")
+
+ @mock.patch("time.sleep", autospec=True)
+ def test___call___with_iterable_close(self, sleep):
+ """
+ close should be handled by wrapper if wrapped iterable does not support it
+ """
+ retry_ = retry_streaming.StreamingRetry()
+
+ def iterable_fn(n):
+ return iter(range(n))
+
+ decorated = retry_(iterable_fn)
+
+ # try closing active generator
+ retryable = decorated(10)
+ assert next(retryable) == 0
+ retryable.close()
+ with pytest.raises(StopIteration):
+ next(retryable)
+
+ # try closing a new generator
+ retryable = decorated(10)
+ retryable.close()
+ with pytest.raises(StopIteration):
+ next(retryable)
+
+ @mock.patch("time.sleep", autospec=True)
+ def test___call___with_iterable_throw(self, sleep):
+ """
+ Throw should work even if the wrapped iterable does not support it
+ """
+ predicate = retry.if_exception_type(ValueError)
+ retry_ = retry_streaming.StreamingRetry(predicate=predicate)
+
+ def iterable_fn(n):
+ return iter(range(n))
+
+ decorated = retry_(iterable_fn)
+
+ # try throwing with active generator
+ retryable = decorated(10)
+ assert next(retryable) == 0
+ # should swallow errors in predicate
+ retryable.throw(ValueError)
+ assert next(retryable) == 1
+ # should raise on other errors
+ with pytest.raises(TypeError):
+ retryable.throw(TypeError)
+ with pytest.raises(StopIteration):
+ next(retryable)
+
+ # try throwing with a new generator
+ retryable = decorated(10)
+ with pytest.raises(ValueError):
+ retryable.throw(ValueError)
+ with pytest.raises(StopIteration):
+ next(retryable)
+
+ @mock.patch("time.sleep", autospec=True)
+ def test___call___with_generator_return(self, sleep):
+ """
+ Generator return value should be passed through retry decorator
+ """
+ retry_ = retry_streaming.StreamingRetry()
+
+ decorated = retry_(self._generator_mock)
+
+ expected_value = "done"
+ generator = decorated(5, return_val=expected_value)
+ found_value = None
+ try:
+ while True:
+ next(generator)
+ except StopIteration as e:
+ found_value = e.value
+ assert found_value == expected_value
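+ # (A generator's return value travels on StopIteration.value, which is
+ # what the wrapper has to surface for this assertion to hold.)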
+
+ @mock.patch("time.sleep", autospec=True)
+ def test___call___with_generator_close(self, sleep):
+ """
+ Close should be passed through retry into target generator
+ """
+ retry_ = retry_streaming.StreamingRetry()
+
+ decorated = retry_(self._generator_mock)
+
+ exception_list = []
+ generator = decorated(10, exceptions_seen=exception_list)
+ for i in range(2):
+ next(generator)
+ generator.close()
+ assert isinstance(exception_list[0], GeneratorExit)
+ with pytest.raises(StopIteration):
+ # calling next on closed generator should raise error
+ next(generator)
+
+ @mock.patch("time.sleep", autospec=True)
+ def test___call___with_generator_throw(self, sleep):
+ """
+ Throw should be passed through retry into target generator
+ """
+ retry_ = retry_streaming.StreamingRetry(
+ predicate=retry.if_exception_type(ValueError),
+ )
+ decorated = retry_(self._generator_mock)
+
+ exception_list = []
+ generator = decorated(10, exceptions_seen=exception_list)
+ for i in range(2):
+ next(generator)
+ with pytest.raises(BufferError):
+ generator.throw(BufferError("test"))
+ assert isinstance(exception_list[0], BufferError)
+ with pytest.raises(StopIteration):
+ # calling next on closed generator should raise error
+ next(generator)
+ # should retry if a retryable exception is thrown
+ exception_list = []
+ generator = decorated(10, exceptions_seen=exception_list)
+ for i in range(2):
+ next(generator)
+ val = generator.throw(ValueError("test"))
+ assert val == 0
+ assert isinstance(exception_list[0], ValueError)
+ # the retried generator is active again, so next should not raise
+ assert next(generator) == 1
+
+ def test_exc_factory_non_retryable_error(self):
+ """
+ The generator should give the caller the option to override the
+ exception creation logic; here a non-retryable error is thrown.
+ """
+ from google.api_core.retry import RetryFailureReason
+ from google.api_core.retry.retry_streaming import retry_target_stream
+
+ timeout = None
+ sent_errors = [ValueError("test"), ValueError("test2"), BufferError("test3")]
+ expected_final_err = RuntimeError("done")
+ expected_source_err = ZeroDivisionError("test4")
+
+ def factory(*args, **kwargs):
+ assert len(kwargs) == 0
+ assert args[0] == sent_errors
+ assert args[1] == RetryFailureReason.NON_RETRYABLE_ERROR
+ assert args[2] == timeout
+ return expected_final_err, expected_source_err
+
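+ # The asserts in `factory` above pin the contract: the factory receives
+ # (error_list, reason, timeout) positionally and returns the
+ # (final_exception, source_exception) pair to be raised.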
+ generator = retry_target_stream(
+ self._generator_mock,
+ retry.if_exception_type(ValueError),
+ [0] * 3,
+ timeout=timeout,
+ exception_factory=factory,
+ )
+ # initialize generator
+ next(generator)
+ # trigger some retryable errors
+ generator.throw(sent_errors[0])
+ generator.throw(sent_errors[1])
+ # trigger a non-retryable error
+ with pytest.raises(expected_final_err.__class__) as exc_info:
+ generator.throw(sent_errors[2])
+ assert exc_info.value == expected_final_err
+ assert exc_info.value.__cause__ == expected_source_err
+
+ def test_exc_factory_timeout(self):
+ """
+ The generator should give the caller the option to override the
+ exception creation logic; here the timeout is exceeded.
+ """
+ import time
+ from google.api_core.retry import RetryFailureReason
+ from google.api_core.retry.retry_streaming import retry_target_stream
+
+ timeout = 2
+ time_now = time.monotonic()
+ now_patcher = mock.patch(
+ "time.monotonic",
+ return_value=time_now,
+ )
+
+ with now_patcher as patched_now:
+ sent_errors = [ValueError("test"), ValueError("test2"), ValueError("test3")]
+ expected_final_err = RuntimeError("done")
+ expected_source_err = ZeroDivisionError("test4")
+
+ def factory(*args, **kwargs):
+ assert len(kwargs) == 0
+ assert args[0] == sent_errors
+ assert args[1] == RetryFailureReason.TIMEOUT
+ assert args[2] == timeout
+ return expected_final_err, expected_source_err
+
+ generator = retry_target_stream(
+ self._generator_mock,
+ retry.if_exception_type(ValueError),
+ [0] * 3,
+ timeout=timeout,
+ exception_factory=factory,
+ check_timeout_on_yield=True,
+ )
+ # initialize generator
+ next(generator)
+ # trigger some retryable errors
+ generator.throw(sent_errors[0])
+ generator.throw(sent_errors[1])
+ # trigger a timeout
+ patched_now.return_value += timeout + 1
+ with pytest.raises(expected_final_err.__class__) as exc_info:
+ generator.throw(sent_errors[2])
+ assert exc_info.value == expected_final_err
+ assert exc_info.value.__cause__ == expected_source_err
diff --git a/tests/unit/test_retry.py b/tests/unit/retry/test_retry_unary.py
similarity index 56%
rename from tests/unit/test_retry.py
rename to tests/unit/retry/test_retry_unary.py
index a0160e90..f5bbcff7 100644
--- a/tests/unit/test_retry.py
+++ b/tests/unit/retry/test_retry_unary.py
@@ -13,46 +13,19 @@
# limitations under the License.
import datetime
-import itertools
+import pytest
import re
-import mock
-import pytest
+try:
+ from unittest import mock
+ from unittest.mock import AsyncMock # pragma: NO COVER # noqa: F401
+except ImportError: # pragma: NO COVER
+ import mock # type: ignore
from google.api_core import exceptions
from google.api_core import retry
-
-def test_if_exception_type():
- predicate = retry.if_exception_type(ValueError)
-
- assert predicate(ValueError())
- assert not predicate(TypeError())
-
-
-def test_if_exception_type_multiple():
- predicate = retry.if_exception_type(ValueError, TypeError)
-
- assert predicate(ValueError())
- assert predicate(TypeError())
- assert not predicate(RuntimeError())
-
-
-def test_if_transient_error():
- assert retry.if_transient_error(exceptions.InternalServerError(""))
- assert retry.if_transient_error(exceptions.TooManyRequests(""))
- assert retry.if_transient_error(exceptions.ServiceUnavailable(""))
- assert not retry.if_transient_error(exceptions.InvalidArgument(""))
-
-
-# Make uniform return half of its maximum, which will be the calculated
-# sleep time.
-@mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n / 2.0)
-def test_exponential_sleep_generator_base_2(uniform):
- gen = retry.exponential_sleep_generator(1, 60, multiplier=2)
-
- result = list(itertools.islice(gen, 8))
- assert result == [1, 2, 4, 8, 16, 32, 60, 60]
+from .test_retry_base import Test_BaseRetry
@mock.patch("time.sleep", autospec=True)
@@ -124,138 +97,87 @@ def test_retry_target_non_retryable_error(utcnow, sleep):
sleep.assert_not_called()
+@mock.patch("asyncio.sleep", autospec=True)
+@mock.patch(
+ "google.api_core.datetime_helpers.utcnow",
+ return_value=datetime.datetime.min,
+ autospec=True,
+)
+@pytest.mark.asyncio
+async def test_retry_target_warning_for_retry(utcnow, sleep):
+ predicate = retry.if_exception_type(ValueError)
+ target = mock.AsyncMock(spec=["__call__"])
+
+ with pytest.warns(Warning) as exc_info:
+ # Note: predicate is just a filler and doesn't affect the test
+ retry.retry_target(target, predicate, range(10), None)
+
+ assert len(exc_info) == 2
+ assert str(exc_info[0].message) == retry.retry_unary._ASYNC_RETRY_WARNING
+ sleep.assert_not_called()
+
+
@mock.patch("time.sleep", autospec=True)
-@mock.patch("google.api_core.datetime_helpers.utcnow", autospec=True)
-def test_retry_target_deadline_exceeded(utcnow, sleep):
+@mock.patch("time.monotonic", autospec=True)
+@pytest.mark.parametrize("use_deadline_arg", [True, False])
+def test_retry_target_timeout_exceeded(monotonic, sleep, use_deadline_arg):
predicate = retry.if_exception_type(ValueError)
exception = ValueError("meep")
target = mock.Mock(side_effect=exception)
# Setup the timeline so that the first call takes 5 seconds but the second
- # call takes 6, which puts the retry over the deadline.
- utcnow.side_effect = [
- # The first call to utcnow establishes the start of the timeline.
- datetime.datetime.min,
- datetime.datetime.min + datetime.timedelta(seconds=5),
- datetime.datetime.min + datetime.timedelta(seconds=11),
- ]
+ # call takes 6, which puts the retry over the timeout.
+ monotonic.side_effect = [0, 5, 11]
+
+ # support "deadline" as an alias for "timeout"
+ kwargs = {"timeout": 10} if not use_deadline_arg else {"deadline": 10}
with pytest.raises(exceptions.RetryError) as exc_info:
- retry.retry_target(target, predicate, range(10), deadline=10)
+ retry.retry_target(target, predicate, range(10), **kwargs)
assert exc_info.value.cause == exception
- assert exc_info.match("Deadline of 10.0s exceeded")
+ assert exc_info.match("Timeout of 10.0s exceeded")
assert exc_info.match("last exception: meep")
assert target.call_count == 2
+ # Ensure the exception message does not include the target fn:
+ # it may be a partial with user data embedded
+ assert str(target) not in exc_info.exconly()
+
def test_retry_target_bad_sleep_generator():
with pytest.raises(ValueError, match="Sleep generator"):
- retry.retry_target(mock.sentinel.target, mock.sentinel.predicate, [], None)
-
-
-class TestRetry(object):
- def test_constructor_defaults(self):
- retry_ = retry.Retry()
- assert retry_._predicate == retry.if_transient_error
- assert retry_._initial == 1
- assert retry_._maximum == 60
- assert retry_._multiplier == 2
- assert retry_._deadline == 120
- assert retry_._on_error is None
- assert retry_.deadline == 120
-
- def test_constructor_options(self):
- _some_function = mock.Mock()
+ retry.retry_target(mock.sentinel.target, lambda x: True, [], None)
- retry_ = retry.Retry(
- predicate=mock.sentinel.predicate,
- initial=1,
- maximum=2,
- multiplier=3,
- deadline=4,
- on_error=_some_function,
- )
- assert retry_._predicate == mock.sentinel.predicate
- assert retry_._initial == 1
- assert retry_._maximum == 2
- assert retry_._multiplier == 3
- assert retry_._deadline == 4
- assert retry_._on_error is _some_function
- def test_with_deadline(self):
- retry_ = retry.Retry(
- predicate=mock.sentinel.predicate,
- initial=1,
- maximum=2,
- multiplier=3,
- deadline=4,
- on_error=mock.sentinel.on_error,
- )
- new_retry = retry_.with_deadline(42)
- assert retry_ is not new_retry
- assert new_retry._deadline == 42
-
- # the rest of the attributes should remain the same
- assert new_retry._predicate is retry_._predicate
- assert new_retry._initial == retry_._initial
- assert new_retry._maximum == retry_._maximum
- assert new_retry._multiplier == retry_._multiplier
- assert new_retry._on_error is retry_._on_error
-
- def test_with_predicate(self):
- retry_ = retry.Retry(
- predicate=mock.sentinel.predicate,
- initial=1,
- maximum=2,
- multiplier=3,
- deadline=4,
- on_error=mock.sentinel.on_error,
- )
- new_retry = retry_.with_predicate(mock.sentinel.predicate)
- assert retry_ is not new_retry
- assert new_retry._predicate == mock.sentinel.predicate
-
- # the rest of the attributes should remain the same
- assert new_retry._deadline == retry_._deadline
- assert new_retry._initial == retry_._initial
- assert new_retry._maximum == retry_._maximum
- assert new_retry._multiplier == retry_._multiplier
- assert new_retry._on_error is retry_._on_error
-
- def test_with_delay_noop(self):
- retry_ = retry.Retry(
- predicate=mock.sentinel.predicate,
- initial=1,
- maximum=2,
- multiplier=3,
- deadline=4,
- on_error=mock.sentinel.on_error,
+@mock.patch("time.sleep", autospec=True)
+def test_retry_target_dynamic_backoff(sleep):
+ """
+ sleep_generator should be iterated after on_error, to support dynamic backoff
+ """
+ sleep.side_effect = RuntimeError("stop after sleep")
+ # start with empty sleep generator; values are added after exception in push_sleep_value
+ sleep_values = []
+ exception = ValueError("trigger retry")
+ error_target = mock.Mock(side_effect=exception)
+ inserted_sleep = 99
+
+ def push_sleep_value(err):
+ sleep_values.append(inserted_sleep)
+
+ with pytest.raises(RuntimeError):
+ retry.retry_target(
+ error_target,
+ predicate=lambda x: True,
+ sleep_generator=sleep_values,
+ on_error=push_sleep_value,
)
- new_retry = retry_.with_delay()
- assert retry_ is not new_retry
- assert new_retry._initial == retry_._initial
- assert new_retry._maximum == retry_._maximum
- assert new_retry._multiplier == retry_._multiplier
+ assert sleep.call_count == 1
+ sleep.assert_called_once_with(inserted_sleep)
- def test_with_delay(self):
- retry_ = retry.Retry(
- predicate=mock.sentinel.predicate,
- initial=1,
- maximum=2,
- multiplier=3,
- deadline=4,
- on_error=mock.sentinel.on_error,
- )
- new_retry = retry_.with_delay(initial=1, maximum=2, multiplier=3)
- assert retry_ is not new_retry
- assert new_retry._initial == 1
- assert new_retry._maximum == 2
- assert new_retry._multiplier == 3
- # the rest of the attributes should remain the same
- assert new_retry._deadline == retry_._deadline
- assert new_retry._predicate is retry_._predicate
- assert new_retry._on_error is retry_._on_error
+class TestRetry(Test_BaseRetry):
+ def _make_one(self, *args, **kwargs):
+ return retry.Retry(*args, **kwargs)
def test___str__(self):
def if_exception_type(exc):
@@ -268,13 +190,13 @@ def if_exception_type(exc):
initial=1.0,
maximum=60.0,
multiplier=2.0,
- deadline=120.0,
+ timeout=120.0,
on_error=None,
)
assert re.match(
(
r", "
- r"initial=1.0, maximum=60.0, multiplier=2.0, deadline=120.0, "
+ r"initial=1.0, maximum=60.0, multiplier=2.0, timeout=120.0, "
r"on_error=None>"
),
str(retry_),
@@ -296,11 +218,9 @@ def test___call___and_execute_success(self, sleep):
target.assert_called_once_with("meep")
sleep.assert_not_called()
- # Make uniform return half of its maximum, which is the calculated sleep time.
- @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n / 2.0)
+ @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n)
@mock.patch("time.sleep", autospec=True)
def test___call___and_execute_retry(self, sleep, uniform):
-
on_error = mock.Mock(spec=["__call__"], side_effect=[None])
retry_ = retry.Retry(predicate=retry.if_exception_type(ValueError))
@@ -319,24 +239,19 @@ def test___call___and_execute_retry(self, sleep, uniform):
sleep.assert_called_once_with(retry_._initial)
assert on_error.call_count == 1
- # Make uniform return half of its maximum, which is the calculated sleep time.
- @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n / 2.0)
+ @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n)
@mock.patch("time.sleep", autospec=True)
- def test___call___and_execute_retry_hitting_deadline(self, sleep, uniform):
-
+ def test___call___and_execute_retry_hitting_timeout(self, sleep, uniform):
on_error = mock.Mock(spec=["__call__"], side_effect=[None] * 10)
retry_ = retry.Retry(
predicate=retry.if_exception_type(ValueError),
initial=1.0,
maximum=1024.0,
multiplier=2.0,
- deadline=9.9,
+ timeout=30.9,
)
- utcnow = datetime.datetime.utcnow()
- utcnow_patcher = mock.patch(
- "google.api_core.datetime_helpers.utcnow", return_value=utcnow
- )
+ monotonic_patcher = mock.patch("time.monotonic", return_value=0)
target = mock.Mock(spec=["__call__"], side_effect=[ValueError()] * 10)
# __name__ is needed by functools.partial.
@@ -345,11 +260,12 @@ def test___call___and_execute_retry_hitting_deadline(self, sleep, uniform):
decorated = retry_(target, on_error=on_error)
target.assert_not_called()
- with utcnow_patcher as patched_utcnow:
+ with monotonic_patcher as patched_monotonic:
# Make sure that calls to fake time.sleep() also advance the mocked
# time clock.
def increase_time(sleep_delay):
- patched_utcnow.return_value += datetime.timedelta(seconds=sleep_delay)
+ patched_monotonic.return_value += sleep_delay
+
sleep.side_effect = increase_time
with pytest.raises(exceptions.RetryError):
@@ -364,8 +280,17 @@ def increase_time(sleep_delay):
last_wait = sleep.call_args.args[0]
total_wait = sum(call_args.args[0] for call_args in sleep.call_args_list)
- assert last_wait == 2.9 # and not 8.0, because the last delay was shortened
- assert total_wait == 9.9 # the same as the deadline
+ assert last_wait == 8.0
+ # The next attempt would be scheduled in 16 secs, and 15 + 16 = 31 > 30.9,
+ # so we do not even wait for it to be scheduled (30.9 is the configured
+ # timeout). This changes the previous behavior of shortening the last
+ # attempt to fit within the timeout. That logic was removed to keep the
+ # Python retry behavior consistent with the other languages, and to avoid
+ # disrupting the randomized retry-delay distribution by artificially
+ # increasing the probability of scheduling two (instead of one) final
+ # attempts with a very short delay between them, when the second retry
+ # has a very low chance of succeeding anyway.
+ assert total_wait == 15.0
@mock.patch("time.sleep", autospec=True)
def test___init___without_retry_executed(self, sleep):
@@ -390,8 +315,7 @@ def test___init___without_retry_executed(self, sleep):
sleep.assert_not_called()
_some_function.assert_not_called()
- # Make uniform return half of its maximum, which is the calculated sleep time.
- @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n / 2.0)
+ @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n)
@mock.patch("time.sleep", autospec=True)
def test___init___when_retry_is_executed(self, sleep, uniform):
_some_function = mock.Mock()
diff --git a/tests/unit/test_bidi.py b/tests/unit/test_bidi.py
index 52215cbd..0e7b018c 100644
--- a/tests/unit/test_bidi.py
+++ b/tests/unit/test_bidi.py
@@ -14,12 +14,22 @@
import datetime
import logging
+import queue
import threading
+import time
+
+try:
+ from unittest import mock
+ from unittest.mock import AsyncMock # pragma: NO COVER # noqa: F401
+except ImportError: # pragma: NO COVER
+ import mock # type: ignore
-import grpc
-import mock
import pytest
-from six.moves import queue
+
+try:
+ import grpc
+except ImportError: # pragma: NO COVER
+ pytest.skip("No GRPC", allow_module_level=True)
from google.api_core import bidi
from google.api_core import exceptions
@@ -121,21 +131,18 @@ class Test_Throttle(object):
def test_repr(self):
delta = datetime.timedelta(seconds=4.5)
instance = bidi._Throttle(access_limit=42, time_window=delta)
- assert repr(instance) == \
- "_Throttle(access_limit=42, time_window={})".format(repr(delta))
+ assert repr(instance) == "_Throttle(access_limit=42, time_window={})".format(
+ repr(delta)
+ )
def test_raises_error_on_invalid_init_arguments(self):
with pytest.raises(ValueError) as exc_info:
- bidi._Throttle(
- access_limit=10, time_window=datetime.timedelta(seconds=0.0)
- )
+ bidi._Throttle(access_limit=10, time_window=datetime.timedelta(seconds=0.0))
assert "time_window" in str(exc_info.value)
assert "must be a positive timedelta" in str(exc_info.value)
with pytest.raises(ValueError) as exc_info:
- bidi._Throttle(
- access_limit=0, time_window=datetime.timedelta(seconds=10)
- )
+ bidi._Throttle(access_limit=0, time_window=datetime.timedelta(seconds=10))
assert "access_limit" in str(exc_info.value)
assert "must be positive" in str(exc_info.value)
@@ -224,18 +231,12 @@ def cancel_side_effect():
class ClosedCall(object):
- # NOTE: This is needed because defining `.next` on an **instance**
- # rather than the **class** will not be iterable in Python 2.
- # This is problematic since a `Mock` just sets members.
-
def __init__(self, exception):
self.exception = exception
def __next__(self):
raise self.exception
- next = __next__ # Python 2
-
def is_active(self):
return False
@@ -296,6 +297,9 @@ def test_close(self):
# ensure the request queue was signaled to stop.
assert bidi_rpc.pending_requests == 1
assert bidi_rpc._request_queue.get() is None
+ # ensure request and callbacks are cleaned up
+ assert bidi_rpc._initial_request is None
+ assert not bidi_rpc._callbacks
def test_close_no_rpc(self):
bidi_rpc = bidi.BidiRpc(None)
@@ -357,8 +361,6 @@ def __next__(self):
raise item
return item
- next = __next__ # Python 2
-
def is_active(self):
return self._is_active
@@ -461,7 +463,9 @@ def test_send_terminate(self):
)
should_recover = mock.Mock(spec=["__call__"], return_value=False)
should_terminate = mock.Mock(spec=["__call__"], return_value=True)
- bidi_rpc = bidi.ResumableBidiRpc(start_rpc, should_recover, should_terminate=should_terminate)
+ bidi_rpc = bidi.ResumableBidiRpc(
+ start_rpc, should_recover, should_terminate=should_terminate
+ )
bidi_rpc.open()
@@ -527,7 +531,9 @@ def test_recv_terminate(self):
)
should_recover = mock.Mock(spec=["__call__"], return_value=False)
should_terminate = mock.Mock(spec=["__call__"], return_value=True)
- bidi_rpc = bidi.ResumableBidiRpc(start_rpc, should_recover, should_terminate=should_terminate)
+ bidi_rpc = bidi.ResumableBidiRpc(
+ start_rpc, should_recover, should_terminate=should_terminate
+ )
bidi_rpc.open()
@@ -621,6 +627,8 @@ def cancel_side_effect():
assert bidi_rpc.pending_requests == 1
assert bidi_rpc._request_queue.get() is None
assert bidi_rpc._finalized
+ assert bidi_rpc._initial_request is None
+ assert not bidi_rpc._callbacks
def test_reopen_failure_on_rpc_restart(self):
error1 = ValueError("1")
@@ -775,6 +783,7 @@ def on_response(response):
consumer.stop()
assert consumer.is_active is False
+ assert consumer._on_response is None
def test_wake_on_error(self):
should_continue = threading.Event()
@@ -807,6 +816,21 @@ def test_wake_on_error(self):
while consumer.is_active:
pass
+ def test_rpc_callback_fires_when_consumer_start_fails(self):
+ expected_exception = exceptions.InvalidArgument(
+ "test", response=grpc.StatusCode.INVALID_ARGUMENT
+ )
+ callback = mock.Mock(spec=["__call__"])
+
+ rpc, _ = make_rpc()
+ bidi_rpc = bidi.BidiRpc(rpc)
+ bidi_rpc.add_done_callback(callback)
+ bidi_rpc._start_rpc.side_effect = expected_exception
+
+ consumer = bidi.BackgroundConsumer(bidi_rpc, on_response=None)
+ consumer.start()
+ assert callback.call_args.args[0] == grpc.StatusCode.INVALID_ARGUMENT
+
def test_consumer_expected_error(self, caplog):
caplog.set_level(logging.DEBUG)
@@ -843,7 +867,7 @@ def test_consumer_unexpected_error(self, caplog):
# Wait for the consumer's thread to exit.
while consumer.is_active:
- pass
+ pass # pragma: NO COVER (race condition)
on_response.assert_not_called()
bidi_rpc.recv.assert_called_once()
@@ -867,6 +891,30 @@ def close_side_effect():
consumer.stop()
assert consumer.is_active is False
+ assert consumer._on_response is None
# calling stop twice should not result in an error.
consumer.stop()
+
+ def test_stop_error_logs(self, caplog):
+ """
+ Closing the client should result in no internal error logs
+
+ https://github.com/googleapis/python-api-core/issues/788
+ """
+ caplog.set_level(logging.DEBUG)
+ bidi_rpc = mock.create_autospec(bidi.BidiRpc, instance=True)
+ bidi_rpc.is_active = True
+ on_response = mock.Mock(spec=["__call__"])
+
+ consumer = bidi.BackgroundConsumer(bidi_rpc, on_response)
+
+ consumer.start()
+ consumer.stop()
+ # let the background thread run for a while before exiting
+ time.sleep(0.1)
+ bidi_rpc.is_active = False
+ # running thread should not result in error logs
+ error_logs = [r.message for r in caplog.records if r.levelname == "ERROR"]
+ assert not error_logs, f"Found unexpected ERROR logs: {error_logs}"
diff --git a/tests/unit/test_client_info.py b/tests/unit/test_client_info.py
index 0eb17c5f..3eacabca 100644
--- a/tests/unit/test_client_info.py
+++ b/tests/unit/test_client_info.py
@@ -13,6 +13,11 @@
# limitations under the License.
+try:
+ import grpc
+except ImportError: # pragma: NO COVER
+ grpc = None
+
from google.api_core import client_info
@@ -20,10 +25,16 @@ def test_constructor_defaults():
info = client_info.ClientInfo()
assert info.python_version is not None
- assert info.grpc_version is not None
+
+ if grpc is not None: # pragma: NO COVER
+ assert info.grpc_version is not None
+ else: # pragma: NO COVER
+ assert info.grpc_version is None
+
assert info.api_core_version is not None
assert info.gapic_version is None
assert info.client_library_version is None
+ assert info.rest_version is None
def test_constructor_options():
@@ -33,7 +44,9 @@ def test_constructor_options():
api_core_version="3",
gapic_version="4",
client_library_version="5",
- user_agent="6"
+ user_agent="6",
+ rest_version="7",
+ protobuf_runtime_version="8",
)
assert info.python_version == "1"
@@ -42,11 +55,16 @@ def test_constructor_options():
assert info.gapic_version == "4"
assert info.client_library_version == "5"
assert info.user_agent == "6"
+ assert info.rest_version == "7"
+ assert info.protobuf_runtime_version == "8"
def test_to_user_agent_minimal():
info = client_info.ClientInfo(
- python_version="1", api_core_version="2", grpc_version=None
+ python_version="1",
+ api_core_version="2",
+ grpc_version=None,
+ protobuf_runtime_version=None,
)
user_agent = info.to_user_agent()
@@ -62,8 +80,25 @@ def test_to_user_agent_full():
gapic_version="4",
client_library_version="5",
user_agent="app-name/1.0",
+ protobuf_runtime_version="6",
+ )
+
+ user_agent = info.to_user_agent()
+
+ assert user_agent == "app-name/1.0 gl-python/1 grpc/2 gax/3 gapic/4 gccl/5 pb/6"
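+ # Token-to-field mapping exercised here: gl-python=python_version,
+ # grpc=grpc_version, gax=api_core_version, gapic=gapic_version,
+ # gccl=client_library_version, pb=protobuf_runtime_version.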
+
+
+def test_to_user_agent_rest():
+ info = client_info.ClientInfo(
+ python_version="1",
+ grpc_version=None,
+ rest_version="2",
+ api_core_version="3",
+ gapic_version="4",
+ client_library_version="5",
+ user_agent="app-name/1.0",
)
user_agent = info.to_user_agent()
- assert user_agent == "app-name/1.0 gl-python/1 grpc/2 gax/3 gapic/4 gccl/5"
+ assert user_agent == "app-name/1.0 gl-python/1 rest/2 gax/3 gapic/4 gccl/5"
diff --git a/tests/unit/test_client_logging.py b/tests/unit/test_client_logging.py
new file mode 100644
index 00000000..b3b0b5c8
--- /dev/null
+++ b/tests/unit/test_client_logging.py
@@ -0,0 +1,140 @@
+import json
+import logging
+from unittest import mock
+
+from google.api_core.client_logging import (
+ setup_logging,
+ initialize_logging,
+ StructuredLogFormatter,
+)
+
+
+def reset_logger(scope):
+ logger = logging.getLogger(scope)
+ logger.handlers = []
+ logger.setLevel(logging.NOTSET)
+ logger.propagate = True
+
+
+def test_setup_logging_w_no_scopes():
+ with mock.patch("google.api_core.client_logging._BASE_LOGGER_NAME", "foogle"):
+ setup_logging()
+ base_logger = logging.getLogger("foogle")
+ assert base_logger.handlers == []
+ assert not base_logger.propagate
+ assert base_logger.level == logging.NOTSET
+
+ reset_logger("foogle")
+
+
+def test_setup_logging_w_base_scope():
+ with mock.patch("google.api_core.client_logging._BASE_LOGGER_NAME", "foogle"):
+ setup_logging("foogle")
+ base_logger = logging.getLogger("foogle")
+ assert isinstance(base_logger.handlers[0], logging.StreamHandler)
+ assert not base_logger.propagate
+ assert base_logger.level == logging.DEBUG
+
+ reset_logger("foogle")
+
+
+def test_setup_logging_w_configured_scope():
+ with mock.patch("google.api_core.client_logging._BASE_LOGGER_NAME", "foogle"):
+ base_logger = logging.getLogger("foogle")
+ base_logger.propagate = False
+ setup_logging("foogle")
+ assert base_logger.handlers == []
+ assert not base_logger.propagate
+ assert base_logger.level == logging.NOTSET
+
+ reset_logger("foogle")
+
+
+def test_setup_logging_w_module_scope():
+ with mock.patch("google.api_core.client_logging._BASE_LOGGER_NAME", "foogle"):
+ setup_logging("foogle.bar")
+
+ base_logger = logging.getLogger("foogle")
+ assert base_logger.handlers == []
+ assert not base_logger.propagate
+ assert base_logger.level == logging.NOTSET
+
+ module_logger = logging.getLogger("foogle.bar")
+ assert isinstance(module_logger.handlers[0], logging.StreamHandler)
+ assert not module_logger.propagate
+ assert module_logger.level == logging.DEBUG
+
+ reset_logger("foogle")
+ reset_logger("foogle.bar")
+
+
+def test_setup_logging_w_incorrect_scope():
+ with mock.patch("google.api_core.client_logging._BASE_LOGGER_NAME", "foogle"):
+ setup_logging("abc")
+
+ base_logger = logging.getLogger("foogle")
+ assert base_logger.handlers == []
+ assert not base_logger.propagate
+ assert base_logger.level == logging.NOTSET
+
+ # TODO(https://github.com/googleapis/python-api-core/issues/759): update test once we add logic to ignore an incorrect scope.
+ logger = logging.getLogger("abc")
+ assert isinstance(logger.handlers[0], logging.StreamHandler)
+ assert not logger.propagate
+ assert logger.level == logging.DEBUG
+
+ reset_logger("foogle")
+ reset_logger("abc")
+
+
+def test_initialize_logging():
+
+ with mock.patch("os.getenv", return_value="foogle.bar"):
+ with mock.patch("google.api_core.client_logging._BASE_LOGGER_NAME", "foogle"):
+ initialize_logging()
+
+ base_logger = logging.getLogger("foogle")
+ assert base_logger.handlers == []
+ assert not base_logger.propagate
+ assert base_logger.level == logging.NOTSET
+
+ module_logger = logging.getLogger("foogle.bar")
+ assert isinstance(module_logger.handlers[0], logging.StreamHandler)
+ assert not module_logger.propagate
+ assert module_logger.level == logging.DEBUG
+
+ # Check that `initialize_logging()` is a no-op after the first time by verifying that user-set configs are not modified:
+ base_logger.propagate = True
+ module_logger.propagate = True
+
+ initialize_logging()
+
+ assert base_logger.propagate
+ assert module_logger.propagate
+
+ reset_logger("foogle")
+ reset_logger("foogle.bar")
+
+
+def test_structured_log_formatter():
+ # TODO(https://github.com/googleapis/python-api-core/issues/761): Test additional fields when implemented.
+ record = logging.LogRecord(
+ name="Appelation",
+ level=logging.DEBUG,
+ msg="This is a test message.",
+ pathname="some/path",
+ lineno=25,
+ args=None,
+ exc_info=None,
+ )
+
+ # Extra fields:
+ record.rpcName = "bar"
+
+ formatted_msg = StructuredLogFormatter().format(record)
+ parsed_msg = json.loads(formatted_msg)
+
+ assert parsed_msg["name"] == "Appelation"
+ assert parsed_msg["severity"] == "DEBUG"
+ assert parsed_msg["message"] == "This is a test message."
+ assert parsed_msg["rpcName"] == "bar"
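+
+
+# Usage sketch (comments only; assumes just the names imported above):
+#
+#     handler = logging.StreamHandler()
+#     handler.setFormatter(StructuredLogFormatter())
+#     logging.getLogger("foogle").addHandler(handler)
+#
+# Each record then renders as one JSON object like the one parsed above.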
diff --git a/tests/unit/test_client_options.py b/tests/unit/test_client_options.py
index 1581c56e..396d6627 100644
--- a/tests/unit/test_client_options.py
+++ b/tests/unit/test_client_options.py
@@ -12,6 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+from re import match
import pytest
from google.api_core import client_options
@@ -35,7 +36,9 @@ def test_constructor():
scopes=[
"https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/cloud-platform.read-only",
- ]
+ ],
+ api_audience="foo2.googleapis.com",
+ universe_domain="googleapis.com",
)
assert options.api_endpoint == "foo.googleapis.com"
@@ -46,6 +49,8 @@ def test_constructor():
"https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/cloud-platform.read-only",
]
+ assert options.api_audience == "foo2.googleapis.com"
+ assert options.universe_domain == "googleapis.com"
def test_constructor_with_encrypted_cert_source():
@@ -72,21 +77,55 @@ def test_constructor_with_both_cert_sources():
)
+def test_constructor_with_api_key():
+
+ options = client_options.ClientOptions(
+ api_endpoint="foo.googleapis.com",
+ client_cert_source=get_client_cert,
+ quota_project_id="quote-proj",
+ api_key="api-key",
+ scopes=[
+ "https://www.googleapis.com/auth/cloud-platform",
+ "https://www.googleapis.com/auth/cloud-platform.read-only",
+ ],
+ )
+
+ assert options.api_endpoint == "foo.googleapis.com"
+ assert options.client_cert_source() == (b"cert", b"key")
+ assert options.quota_project_id == "quote-proj"
+ assert options.api_key == "api-key"
+ assert options.scopes == [
+ "https://www.googleapis.com/auth/cloud-platform",
+ "https://www.googleapis.com/auth/cloud-platform.read-only",
+ ]
+
+
+def test_constructor_with_both_api_key_and_credentials_file():
+ with pytest.raises(ValueError):
+ client_options.ClientOptions(
+ api_key="api-key",
+ credentials_file="path/to/credentials.json",
+ )
+
+
def test_from_dict():
options = client_options.from_dict(
{
"api_endpoint": "foo.googleapis.com",
+ "universe_domain": "googleapis.com",
"client_cert_source": get_client_cert,
"quota_project_id": "quote-proj",
"credentials_file": "path/to/credentials.json",
"scopes": [
"https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/cloud-platform.read-only",
- ]
+ ],
+ "api_audience": "foo2.googleapis.com",
}
)
assert options.api_endpoint == "foo.googleapis.com"
+ assert options.universe_domain == "googleapis.com"
assert options.client_cert_source() == (b"cert", b"key")
assert options.quota_project_id == "quote-proj"
assert options.credentials_file == "path/to/credentials.json"
@@ -94,6 +133,8 @@ def test_from_dict():
"https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/cloud-platform.read-only",
]
+ assert options.api_key is None
+ assert options.api_audience == "foo2.googleapis.com"
def test_from_dict_bad_argument():
@@ -108,10 +149,22 @@ def test_from_dict_bad_argument():
def test_repr():
- options = client_options.ClientOptions(api_endpoint="foo.googleapis.com")
-
- assert (
- repr(options)
- == "ClientOptions: {'api_endpoint': 'foo.googleapis.com', 'client_cert_source': None, 'client_encrypted_cert_source': None}"
- or "ClientOptions: {'client_encrypted_cert_source': None, 'client_cert_source': None, 'api_endpoint': 'foo.googleapis.com'}"
+ expected_keys = set(
+ [
+ "api_endpoint",
+ "universe_domain",
+ "client_cert_source",
+ "client_encrypted_cert_source",
+ "quota_project_id",
+ "credentials_file",
+ "scopes",
+ "api_key",
+ "api_audience",
+ ]
)
+ options = client_options.ClientOptions(api_endpoint="foo.googleapis.com")
+ options_repr = repr(options)
+ options_keys = vars(options).keys()
+ assert match(r"ClientOptions:", options_repr)
+ assert match(r".*'api_endpoint': 'foo.googleapis.com'.*", options_repr)
+ assert options_keys == expected_keys
diff --git a/tests/unit/test_datetime_helpers.py b/tests/unit/test_datetime_helpers.py
index 4ddcf361..5f5470a6 100644
--- a/tests/unit/test_datetime_helpers.py
+++ b/tests/unit/test_datetime_helpers.py
@@ -16,7 +16,6 @@
import datetime
import pytest
-import pytz
from google.api_core import datetime_helpers
from google.protobuf import timestamp_pb2
@@ -31,7 +30,7 @@ def test_utcnow():
def test_to_milliseconds():
- dt = datetime.datetime(1970, 1, 1, 0, 0, 1, tzinfo=pytz.utc)
+ dt = datetime.datetime(1970, 1, 1, 0, 0, 1, tzinfo=datetime.timezone.utc)
assert datetime_helpers.to_milliseconds(dt) == 1000
@@ -42,7 +41,7 @@ def test_to_microseconds():
def test_to_microseconds_non_utc():
- zone = pytz.FixedOffset(-1)
+ zone = datetime.timezone(datetime.timedelta(minutes=-1))
dt = datetime.datetime(1970, 1, 1, 0, 0, 0, tzinfo=zone)
assert datetime_helpers.to_microseconds(dt) == ONE_MINUTE_IN_MICROSECONDS
@@ -56,7 +55,7 @@ def test_to_microseconds_naive():
def test_from_microseconds():
five_mins_from_epoch_in_microseconds = 5 * ONE_MINUTE_IN_MICROSECONDS
five_mins_from_epoch_datetime = datetime.datetime(
- 1970, 1, 1, 0, 5, 0, tzinfo=pytz.utc
+ 1970, 1, 1, 0, 5, 0, tzinfo=datetime.timezone.utc
)
result = datetime_helpers.from_microseconds(five_mins_from_epoch_in_microseconds)
@@ -78,28 +77,28 @@ def test_from_iso8601_time():
def test_from_rfc3339():
value = "2009-12-17T12:44:32.123456Z"
assert datetime_helpers.from_rfc3339(value) == datetime.datetime(
- 2009, 12, 17, 12, 44, 32, 123456, pytz.utc
+ 2009, 12, 17, 12, 44, 32, 123456, datetime.timezone.utc
)
def test_from_rfc3339_nanos():
value = "2009-12-17T12:44:32.123456Z"
assert datetime_helpers.from_rfc3339_nanos(value) == datetime.datetime(
- 2009, 12, 17, 12, 44, 32, 123456, pytz.utc
+ 2009, 12, 17, 12, 44, 32, 123456, datetime.timezone.utc
)
def test_from_rfc3339_without_nanos():
value = "2009-12-17T12:44:32Z"
assert datetime_helpers.from_rfc3339(value) == datetime.datetime(
- 2009, 12, 17, 12, 44, 32, 0, pytz.utc
+ 2009, 12, 17, 12, 44, 32, 0, datetime.timezone.utc
)
def test_from_rfc3339_nanos_without_nanos():
value = "2009-12-17T12:44:32Z"
assert datetime_helpers.from_rfc3339_nanos(value) == datetime.datetime(
- 2009, 12, 17, 12, 44, 32, 0, pytz.utc
+ 2009, 12, 17, 12, 44, 32, 0, datetime.timezone.utc
)
@@ -119,7 +118,7 @@ def test_from_rfc3339_nanos_without_nanos():
def test_from_rfc3339_with_truncated_nanos(truncated, micros):
value = "2009-12-17T12:44:32.{}Z".format(truncated)
assert datetime_helpers.from_rfc3339(value) == datetime.datetime(
- 2009, 12, 17, 12, 44, 32, micros, pytz.utc
+ 2009, 12, 17, 12, 44, 32, micros, datetime.timezone.utc
)
@@ -148,7 +147,7 @@ def test_from_rfc3339_nanos_is_deprecated():
def test_from_rfc3339_nanos_with_truncated_nanos(truncated, micros):
value = "2009-12-17T12:44:32.{}Z".format(truncated)
assert datetime_helpers.from_rfc3339_nanos(value) == datetime.datetime(
- 2009, 12, 17, 12, 44, 32, micros, pytz.utc
+ 2009, 12, 17, 12, 44, 32, micros, datetime.timezone.utc
)
@@ -171,20 +170,20 @@ def test_to_rfc3339():
def test_to_rfc3339_with_utc():
- value = datetime.datetime(2016, 4, 5, 13, 30, 0, tzinfo=pytz.utc)
+ value = datetime.datetime(2016, 4, 5, 13, 30, 0, tzinfo=datetime.timezone.utc)
expected = "2016-04-05T13:30:00.000000Z"
assert datetime_helpers.to_rfc3339(value, ignore_zone=False) == expected
def test_to_rfc3339_with_non_utc():
- zone = pytz.FixedOffset(-60)
+ zone = datetime.timezone(datetime.timedelta(minutes=-60))
value = datetime.datetime(2016, 4, 5, 13, 30, 0, tzinfo=zone)
expected = "2016-04-05T14:30:00.000000Z"
assert datetime_helpers.to_rfc3339(value, ignore_zone=False) == expected
def test_to_rfc3339_with_non_utc_ignore_zone():
- zone = pytz.FixedOffset(-60)
+ zone = datetime.timezone(datetime.timedelta(minutes=-60))
value = datetime.datetime(2016, 4, 5, 13, 30, 0, tzinfo=zone)
expected = "2016-04-05T13:30:00.000000Z"
assert datetime_helpers.to_rfc3339(value, ignore_zone=True) == expected
@@ -283,7 +282,7 @@ def test_from_rfc3339_w_invalid():
def test_from_rfc3339_wo_fraction():
timestamp = "2016-12-20T21:13:47Z"
expected = datetime_helpers.DatetimeWithNanoseconds(
- 2016, 12, 20, 21, 13, 47, tzinfo=pytz.UTC
+ 2016, 12, 20, 21, 13, 47, tzinfo=datetime.timezone.utc
)
stamp = datetime_helpers.DatetimeWithNanoseconds.from_rfc3339(timestamp)
assert stamp == expected
@@ -292,7 +291,7 @@ def test_from_rfc3339_wo_fraction():
def test_from_rfc3339_w_partial_precision():
timestamp = "2016-12-20T21:13:47.1Z"
expected = datetime_helpers.DatetimeWithNanoseconds(
- 2016, 12, 20, 21, 13, 47, microsecond=100000, tzinfo=pytz.UTC
+ 2016, 12, 20, 21, 13, 47, microsecond=100000, tzinfo=datetime.timezone.utc
)
stamp = datetime_helpers.DatetimeWithNanoseconds.from_rfc3339(timestamp)
assert stamp == expected
@@ -301,7 +300,7 @@ def test_from_rfc3339_w_partial_precision():
def test_from_rfc3339_w_full_precision():
timestamp = "2016-12-20T21:13:47.123456789Z"
expected = datetime_helpers.DatetimeWithNanoseconds(
- 2016, 12, 20, 21, 13, 47, nanosecond=123456789, tzinfo=pytz.UTC
+ 2016, 12, 20, 21, 13, 47, nanosecond=123456789, tzinfo=datetime.timezone.utc
)
stamp = datetime_helpers.DatetimeWithNanoseconds.from_rfc3339(timestamp)
assert stamp == expected
@@ -332,7 +331,9 @@ def test_timestamp_pb_wo_nanos_naive():
stamp = datetime_helpers.DatetimeWithNanoseconds(
2016, 12, 20, 21, 13, 47, 123456
)
- delta = stamp.replace(tzinfo=pytz.UTC) - datetime_helpers._UTC_EPOCH
+ delta = (
+ stamp.replace(tzinfo=datetime.timezone.utc) - datetime_helpers._UTC_EPOCH
+ )
seconds = int(delta.total_seconds())
nanos = 123456000
timestamp = timestamp_pb2.Timestamp(seconds=seconds, nanos=nanos)
@@ -341,7 +342,7 @@ def test_timestamp_pb_wo_nanos_naive():
@staticmethod
def test_timestamp_pb_w_nanos():
stamp = datetime_helpers.DatetimeWithNanoseconds(
- 2016, 12, 20, 21, 13, 47, nanosecond=123456789, tzinfo=pytz.UTC
+ 2016, 12, 20, 21, 13, 47, nanosecond=123456789, tzinfo=datetime.timezone.utc
)
delta = stamp - datetime_helpers._UTC_EPOCH
timestamp = timestamp_pb2.Timestamp(
@@ -351,7 +352,9 @@ def test_timestamp_pb_w_nanos():
@staticmethod
def test_from_timestamp_pb_wo_nanos():
- when = datetime.datetime(2016, 12, 20, 21, 13, 47, 123456, tzinfo=pytz.UTC)
+ when = datetime.datetime(
+ 2016, 12, 20, 21, 13, 47, 123456, tzinfo=datetime.timezone.utc
+ )
delta = when - datetime_helpers._UTC_EPOCH
seconds = int(delta.total_seconds())
timestamp = timestamp_pb2.Timestamp(seconds=seconds)
@@ -361,11 +364,13 @@ def test_from_timestamp_pb_wo_nanos():
assert _to_seconds(when) == _to_seconds(stamp)
assert stamp.microsecond == 0
assert stamp.nanosecond == 0
- assert stamp.tzinfo == pytz.UTC
+ assert stamp.tzinfo == datetime.timezone.utc
@staticmethod
def test_from_timestamp_pb_w_nanos():
- when = datetime.datetime(2016, 12, 20, 21, 13, 47, 123456, tzinfo=pytz.UTC)
+ when = datetime.datetime(
+ 2016, 12, 20, 21, 13, 47, 123456, tzinfo=datetime.timezone.utc
+ )
delta = when - datetime_helpers._UTC_EPOCH
seconds = int(delta.total_seconds())
timestamp = timestamp_pb2.Timestamp(seconds=seconds, nanos=123456789)
@@ -375,7 +380,7 @@ def test_from_timestamp_pb_w_nanos():
assert _to_seconds(when) == _to_seconds(stamp)
assert stamp.microsecond == 123456
assert stamp.nanosecond == 123456789
- assert stamp.tzinfo == pytz.UTC
+ assert stamp.tzinfo == datetime.timezone.utc
def _to_seconds(value):
@@ -387,5 +392,5 @@ def _to_seconds(value):
Returns:
int: Microseconds since the unix epoch.
"""
- assert value.tzinfo is pytz.UTC
+ assert value.tzinfo is datetime.timezone.utc
return calendar.timegm(value.timetuple())
diff --git a/tests/unit/test_exceptions.py b/tests/unit/test_exceptions.py
index 040ac8ac..e3f8f909 100644
--- a/tests/unit/test_exceptions.py
+++ b/tests/unit/test_exceptions.py
@@ -12,14 +12,22 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import http.client
import json
+from unittest import mock
-import grpc
-import mock
+import pytest
import requests
-from six.moves import http_client
+
+try:
+ import grpc
+ from grpc_status import rpc_status
+except ImportError: # pragma: NO COVER
+ grpc = rpc_status = None
from google.api_core import exceptions
+from google.protobuf import any_pb2, json_format
+from google.rpc import error_details_pb2, status_pb2
def test_create_google_cloud_error():
@@ -33,11 +41,8 @@ def test_create_google_cloud_error():
def test_create_google_cloud_error_with_args():
error = {
- "domain": "global",
- "location": "test",
- "locationType": "testing",
+ "code": 600,
"message": "Testing",
- "reason": "test",
}
response = mock.sentinel.response
exception = exceptions.GoogleAPICallError("Testing", [error], response=response)
@@ -50,8 +55,8 @@ def test_create_google_cloud_error_with_args():
def test_from_http_status():
message = "message"
- exception = exceptions.from_http_status(http_client.NOT_FOUND, message)
- assert exception.code == http_client.NOT_FOUND
+ exception = exceptions.from_http_status(http.client.NOT_FOUND, message)
+ assert exception.code == http.client.NOT_FOUND
assert exception.message == message
assert exception.errors == []
@@ -61,11 +66,11 @@ def test_from_http_status_with_errors_and_response():
errors = ["1", "2"]
response = mock.sentinel.response
exception = exceptions.from_http_status(
- http_client.NOT_FOUND, message, errors=errors, response=response
+ http.client.NOT_FOUND, message, errors=errors, response=response
)
assert isinstance(exception, exceptions.NotFound)
- assert exception.code == http_client.NOT_FOUND
+ assert exception.code == http.client.NOT_FOUND
assert exception.message == message
assert exception.errors == errors
assert exception.response == response
@@ -82,7 +87,7 @@ def test_from_http_status_unknown_code():
def make_response(content):
response = requests.Response()
response._content = content
- response.status_code = http_client.NOT_FOUND
+ response.status_code = http.client.NOT_FOUND
response.request = requests.Request(
method="POST", url="https://example.com"
).prepare()
@@ -95,18 +100,19 @@ def test_from_http_response_no_content():
exception = exceptions.from_http_response(response)
assert isinstance(exception, exceptions.NotFound)
- assert exception.code == http_client.NOT_FOUND
+ assert exception.code == http.client.NOT_FOUND
assert exception.message == "POST https://example.com/: unknown error"
assert exception.response == response
def test_from_http_response_text_content():
response = make_response(b"message")
+ response.encoding = "UTF8" # suppress charset_normalizer warning
exception = exceptions.from_http_response(response)
assert isinstance(exception, exceptions.NotFound)
- assert exception.code == http_client.NOT_FOUND
+ assert exception.code == http.client.NOT_FOUND
assert exception.message == "POST https://example.com/: message"
@@ -120,7 +126,7 @@ def test_from_http_response_json_content():
exception = exceptions.from_http_response(response)
assert isinstance(exception, exceptions.NotFound)
- assert exception.code == http_client.NOT_FOUND
+ assert exception.code == http.client.NOT_FOUND
assert exception.message == "POST https://example.com/: json message"
assert exception.errors == ["1", "2"]
@@ -131,36 +137,50 @@ def test_from_http_response_bad_json_content():
exception = exceptions.from_http_response(response)
assert isinstance(exception, exceptions.NotFound)
- assert exception.code == http_client.NOT_FOUND
+ assert exception.code == http.client.NOT_FOUND
assert exception.message == "POST https://example.com/: unknown error"
def test_from_http_response_json_unicode_content():
response = make_response(
json.dumps(
- {"error": {"message": u"\u2019 message", "errors": ["1", "2"]}}
+ {"error": {"message": "\u2019 message", "errors": ["1", "2"]}}
).encode("utf-8")
)
exception = exceptions.from_http_response(response)
assert isinstance(exception, exceptions.NotFound)
- assert exception.code == http_client.NOT_FOUND
- assert exception.message == u"POST https://example.com/: \u2019 message"
+ assert exception.code == http.client.NOT_FOUND
+ assert exception.message == "POST https://example.com/: \u2019 message"
assert exception.errors == ["1", "2"]
+@pytest.mark.skipif(grpc is None, reason="No grpc")
def test_from_grpc_status():
message = "message"
exception = exceptions.from_grpc_status(grpc.StatusCode.OUT_OF_RANGE, message)
assert isinstance(exception, exceptions.BadRequest)
assert isinstance(exception, exceptions.OutOfRange)
- assert exception.code == http_client.BAD_REQUEST
+ assert exception.code == http.client.BAD_REQUEST
+ assert exception.grpc_status_code == grpc.StatusCode.OUT_OF_RANGE
+ assert exception.message == message
+ assert exception.errors == []
+
+
+@pytest.mark.skipif(grpc is None, reason="No grpc")
+def test_from_grpc_status_as_int():
+ message = "message"
+ exception = exceptions.from_grpc_status(11, message)
+ assert isinstance(exception, exceptions.BadRequest)
+ assert isinstance(exception, exceptions.OutOfRange)
+ assert exception.code == http.client.BAD_REQUEST
assert exception.grpc_status_code == grpc.StatusCode.OUT_OF_RANGE
assert exception.message == message
assert exception.errors == []
+@pytest.mark.skipif(grpc is None, reason="No grpc")
def test_from_grpc_status_with_errors_and_response():
message = "message"
response = mock.sentinel.response
@@ -175,6 +195,7 @@ def test_from_grpc_status_with_errors_and_response():
assert exception.response == response
+@pytest.mark.skipif(grpc is None, reason="No grpc")
def test_from_grpc_status_unknown_code():
message = "message"
exception = exceptions.from_grpc_status(grpc.StatusCode.OK, message)
@@ -182,6 +203,7 @@ def test_from_grpc_status_unknown_code():
assert exception.message == message
+@pytest.mark.skipif(grpc is None, reason="No grpc")
def test_from_grpc_error():
message = "message"
error = mock.create_autospec(grpc.Call, instance=True)
@@ -192,13 +214,14 @@ def test_from_grpc_error():
assert isinstance(exception, exceptions.BadRequest)
assert isinstance(exception, exceptions.InvalidArgument)
- assert exception.code == http_client.BAD_REQUEST
+ assert exception.code == http.client.BAD_REQUEST
assert exception.grpc_status_code == grpc.StatusCode.INVALID_ARGUMENT
assert exception.message == message
assert exception.errors == [error]
assert exception.response == error
+@pytest.mark.skipif(grpc is None, reason="No grpc")
def test_from_grpc_error_non_call():
message = "message"
error = mock.create_autospec(grpc.RpcError, instance=True)
@@ -212,3 +235,161 @@ def test_from_grpc_error_non_call():
assert exception.message == message
assert exception.errors == [error]
assert exception.response == error
+
+
+@pytest.mark.skipif(grpc is None, reason="No grpc")
+def test_from_grpc_error_bare_call():
+ message = "Testing"
+
+ class TestingError(grpc.Call, grpc.RpcError):
+ def __init__(self, exception):
+ self.exception = exception
+
+ def code(self):
+ return self.exception.grpc_status_code
+
+ def details(self):
+ return message
+
+ nested_message = "message"
+ error = TestingError(exceptions.GoogleAPICallError(nested_message))
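+ # GoogleAPICallError carries no grpc_status_code, so code() returns None for this bare call.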
+
+ exception = exceptions.from_grpc_error(error)
+
+ assert isinstance(exception, exceptions.GoogleAPICallError)
+ assert exception.code is None
+ assert exception.grpc_status_code is None
+ assert exception.message == message
+ assert exception.errors == [error]
+ assert exception.response == error
+ assert exception.details == []
+
+
+def create_bad_request_details():
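+ # Pack a BadRequest detail into a protobuf Any, the wire format services
+ # use to attach structured details to an error Status.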
+ bad_request_details = error_details_pb2.BadRequest()
+ field_violation = bad_request_details.field_violations.add()
+ field_violation.field = "document.content"
+ field_violation.description = "Must have some text content to annotate."
+ status_detail = any_pb2.Any()
+ status_detail.Pack(bad_request_details)
+ return status_detail
+
+
+def create_error_info_details():
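+ # ErrorInfo carries a machine-readable reason, domain, and metadata map.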
+ info = error_details_pb2.ErrorInfo(
+ reason="SERVICE_DISABLED",
+ domain="googleapis.com",
+ metadata={
+ "consumer": "projects/455411330361",
+ "service": "translate.googleapis.com",
+ },
+ )
+ status_detail = any_pb2.Any()
+ status_detail.Pack(info)
+ return status_detail
+
+
+def test_error_details_from_rest_response():
+ bad_request_detail = create_bad_request_details()
+ error_info_detail = create_error_info_details()
+ status = status_pb2.Status()
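+ # Code 3 corresponds to INVALID_ARGUMENT.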
+ status.code = 3
+ status.message = (
+ "3 INVALID_ARGUMENT: One of content, or gcs_content_uri must be set."
+ )
+ status.details.append(bad_request_detail)
+ status.details.append(error_info_detail)
+
+ # See JSON schema in https://cloud.google.com/apis/design/errors#http_mapping
+ http_response = make_response(
+ json.dumps(
+ {"error": json.loads(json_format.MessageToJson(status, sort_keys=True))}
+ ).encode("utf-8")
+ )
+ exception = exceptions.from_http_response(http_response)
+ want_error_details = [
+ json.loads(json_format.MessageToJson(bad_request_detail)),
+ json.loads(json_format.MessageToJson(error_info_detail)),
+ ]
+ assert want_error_details == exception.details
+
+ # 404 POST comes from make_response.
+ assert str(exception) == (
+ "404 POST https://example.com/: 3 INVALID_ARGUMENT:"
+ " One of content, or gcs_content_uri must be set."
+ " [{'@type': 'type.googleapis.com/google.rpc.BadRequest',"
+ " 'fieldViolations': [{'description': 'Must have some text content to annotate.',"
+ " 'field': 'document.content'}]},"
+ " {'@type': 'type.googleapis.com/google.rpc.ErrorInfo',"
+ " 'domain': 'googleapis.com',"
+ " 'metadata': {'consumer': 'projects/455411330361',"
+ " 'service': 'translate.googleapis.com'},"
+ " 'reason': 'SERVICE_DISABLED'}]"
+ )
+
+
+def test_error_details_from_v1_rest_response():
+ response = make_response(
+ json.dumps(
+ {"error": {"message": "\u2019 message", "errors": ["1", "2"]}}
+ ).encode("utf-8")
+ )
+ exception = exceptions.from_http_response(response)
+ assert exception.details == []
+ assert (
+ exception.reason is None
+ and exception.domain is None
+ and exception.metadata is None
+ )
+
+
+@pytest.mark.skipif(grpc is None, reason="gRPC not importable")
+def test_error_details_from_grpc_response():
+ status = rpc_status.status_pb2.Status()
+ status.code = 3
+ status.message = (
+ "3 INVALID_ARGUMENT: One of content, or gcs_content_uri must be set."
+ )
+ status_br_detail = create_bad_request_details()
+ status_ei_detail = create_error_info_details()
+ status.details.append(status_br_detail)
+ status.details.append(status_ei_detail)
+
+ # The actual error doesn't matter as long as it's a grpc.Call,
+ # because from_call is mocked.
+ error = mock.create_autospec(grpc.Call, instance=True)
+ with mock.patch("grpc_status.rpc_status.from_call") as m:
+ m.return_value = status
+ exception = exceptions.from_grpc_error(error)
+
+ bad_request_detail = error_details_pb2.BadRequest()
+ error_info_detail = error_details_pb2.ErrorInfo()
+ status_br_detail.Unpack(bad_request_detail)
+ status_ei_detail.Unpack(error_info_detail)
+ assert exception.details == [bad_request_detail, error_info_detail]
+ assert exception.reason == error_info_detail.reason
+ assert exception.domain == error_info_detail.domain
+ assert exception.metadata == error_info_detail.metadata
+
+
+@pytest.mark.skipif(grpc is None, reason="gRPC not importable")
+def test_error_details_from_grpc_response_unknown_error():
+ status_detail = any_pb2.Any()
+
+ status = rpc_status.status_pb2.Status()
+ status.code = 3
+ status.message = (
+ "3 INVALID_ARGUMENT: One of content, or gcs_content_uri must be set."
+ )
+ status.details.append(status_detail)
+
+ error = mock.create_autospec(grpc.Call, instance=True)
+ with mock.patch("grpc_status.rpc_status.from_call") as m:
+ m.return_value = status
+ exception = exceptions.from_grpc_error(error)
+ assert exception.details == [status_detail]
+ assert (
+ exception.reason is None
+ and exception.domain is None
+ and exception.metadata is None
+ )
diff --git a/tests/unit/test_extended_operation.py b/tests/unit/test_extended_operation.py
new file mode 100644
index 00000000..ab550662
--- /dev/null
+++ b/tests/unit/test_extended_operation.py
@@ -0,0 +1,246 @@
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import dataclasses
+import enum
+import typing
+from unittest import mock
+
+import pytest
+
+from google.api_core import exceptions
+from google.api_core import extended_operation
+from google.api_core import retry
+
+TEST_OPERATION_NAME = "test/extended_operation"
+
+
+@dataclasses.dataclass(frozen=True)
+class CustomOperation:
+ class StatusCode(enum.Enum):
+ UNKNOWN = 0
+ DONE = 1
+ PENDING = 2
+
+ class LROCustomErrors:
+ class LROCustomError:
+ def __init__(self, code: str = "", message: str = ""):
+ self.code = code
+ self.message = message
+
+ def __init__(self, errors: typing.Optional[typing.List[LROCustomError]] = None):
+ self.errors = errors or []
+
+ name: str
+ status: StatusCode
+ error_code: typing.Optional[int] = None
+ error_message: typing.Optional[str] = None
+ armor_class: typing.Optional[int] = None
+ # Note: `error` can be removed once proposal A from
+ # b/284179390 is implemented.
+ error: typing.Optional[LROCustomErrors] = None
+
+ # Note: in generated clients, this property must be generated for each
+ # extended operation message type.
+ # The status may be an enum, a string, or a bool. If it's a string or enum,
+ # its text is compared to the string "DONE".
+ @property
+ def done(self):
+ return self.status.name == "DONE"
+
+
+def make_extended_operation(responses=None):
+ client_operations_responses = responses or [
+ CustomOperation(
+ name=TEST_OPERATION_NAME, status=CustomOperation.StatusCode.PENDING
+ )
+ ]
+
+ refresh = mock.Mock(spec=["__call__"], side_effect=client_operations_responses)
+ refresh.responses = client_operations_responses
+ cancel = mock.Mock(spec=["__call__"])
+ extended_operation_future = extended_operation.ExtendedOperation.make(
+ refresh,
+ cancel,
+ client_operations_responses[0],
+ )
+
+ return extended_operation_future, refresh, cancel
+
+
+def test_constructor():
+ ex_op, refresh, _ = make_extended_operation()
+ assert ex_op._extended_operation == refresh.responses[0]
+ assert not ex_op.cancelled()
+ assert not ex_op.done()
+ assert ex_op.name == TEST_OPERATION_NAME
+ assert ex_op.status == CustomOperation.StatusCode.PENDING
+ assert ex_op.error_code is None
+ assert ex_op.error_message is None
+
+
+def test_done():
+ responses = [
+ CustomOperation(
+ name=TEST_OPERATION_NAME, status=CustomOperation.StatusCode.PENDING
+ ),
+ # Second response indicates that the operation has finished.
+ CustomOperation(
+ name=TEST_OPERATION_NAME, status=CustomOperation.StatusCode.DONE
+ ),
+ # Bumper to make sure we stop polling on DONE.
+ CustomOperation(
+ name=TEST_OPERATION_NAME,
+ status=CustomOperation.StatusCode.DONE,
+ error_message="Gone too far!",
+ ),
+ ]
+ ex_op, refresh, _ = make_extended_operation(responses)
+
+ # Start out not done.
+ assert not ex_op.done()
+ assert refresh.call_count == 1
+
+ # Refresh brings us to the done state.
+ assert ex_op.done()
+ assert refresh.call_count == 2
+ assert not ex_op.error_message
+
+ # Make sure that subsequent checks are no-ops.
+ assert ex_op.done()
+ assert refresh.call_count == 2
+ assert not ex_op.error_message
+
+
+def test_cancellation():
+ responses = [
+ CustomOperation(
+ name=TEST_OPERATION_NAME, status=CustomOperation.StatusCode.PENDING
+ ),
+ # Second response indicates that the operation was cancelled.
+ CustomOperation(
+ name=TEST_OPERATION_NAME, status=CustomOperation.StatusCode.DONE
+ ),
+ ]
+ ex_op, _, cancel = make_extended_operation(responses)
+
+ assert not ex_op.cancelled()
+
+ assert ex_op.cancel()
+ assert ex_op.cancelled()
+ cancel.assert_called_once_with()
+
+ # Cancelling twice should have no effect.
+ assert not ex_op.cancel()
+ cancel.assert_called_once_with()
+
+
+def test_done_w_retry():
+ # The retry predicate is never invoked in this test, so coverage flags the lambda; ignore it.
+ test_retry = retry.Retry(predicate=lambda x: True) # pragma: NO COVER
+
+ responses = [
+ CustomOperation(
+ name=TEST_OPERATION_NAME, status=CustomOperation.StatusCode.PENDING
+ ),
+ CustomOperation(
+ name=TEST_OPERATION_NAME, status=CustomOperation.StatusCode.DONE
+ ),
+ ]
+
+ ex_op, refresh, _ = make_extended_operation(responses)
+
+ ex_op.done(retry=test_retry)
+
+ refresh.assert_called_once_with(retry=test_retry)
+
+
+def test_error():
+ responses = [
+ CustomOperation(
+ name=TEST_OPERATION_NAME,
+ status=CustomOperation.StatusCode.DONE,
+ error_code=400,
+ error_message="Bad request",
+ ),
+ ]
+
+ ex_op, _, _ = make_extended_operation(responses)
+
+ # error_code 400 maps to BadRequest (a GoogleAPICallError) whether or not grpc is installed
+ with pytest.raises(exceptions.BadRequest):
+ ex_op.result()
+
+ # Test GCE custom LRO Error. See b/284179390
+ # Note: This test case can be removed once proposal A from
+ # b/284179390 is implemented.
+ _EXCEPTION_CODE = "INCOMPATIBLE_BACKEND_SERVICES"
+ _EXCEPTION_MESSAGE = "Validation failed for instance group"
+ responses = [
+ CustomOperation(
+ name=TEST_OPERATION_NAME,
+ status=CustomOperation.StatusCode.DONE,
+ error_code=400,
+ error_message="Bad request",
+ error=CustomOperation.LROCustomErrors(
+ errors=[
+ CustomOperation.LROCustomErrors.LROCustomError(
+ code=_EXCEPTION_CODE, message=_EXCEPTION_MESSAGE
+ )
+ ]
+ ),
+ ),
+ ]
+
+ ex_op, _, _ = make_extended_operation(responses)
+
+ # error_code 400 maps to BadRequest (a GoogleAPICallError) whether or not grpc is installed
+ with pytest.raises(
+ exceptions.BadRequest, match=f"{_EXCEPTION_CODE}: {_EXCEPTION_MESSAGE}"
+ ):
+ ex_op.result()
+
+ # Inconsistent result: DONE with an error_code but no error_message
+ responses = [
+ CustomOperation(
+ name=TEST_OPERATION_NAME,
+ status=CustomOperation.StatusCode.DONE,
+ error_code=2112,
+ ),
+ ]
+
+ ex_op, _, _ = make_extended_operation(responses)
+
+ with pytest.raises(exceptions.GoogleAPICallError):
+ ex_op.result()
+
+
+def test_pass_through():
+ responses = [
+ CustomOperation(
+ name=TEST_OPERATION_NAME,
+ status=CustomOperation.StatusCode.PENDING,
+ armor_class=10,
+ ),
+ CustomOperation(
+ name=TEST_OPERATION_NAME,
+ status=CustomOperation.StatusCode.DONE,
+ armor_class=20,
+ ),
+ ]
+ ex_op, _, _ = make_extended_operation(responses)
+
+ assert ex_op.armor_class == 10
+ ex_op.result()
+ assert ex_op.armor_class == 20
diff --git a/tests/unit/test_grpc_helpers.py b/tests/unit/test_grpc_helpers.py
index f8fed403..8de9d8c0 100644
--- a/tests/unit/test_grpc_helpers.py
+++ b/tests/unit/test_grpc_helpers.py
@@ -12,10 +12,15 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import grpc
-import mock
+from unittest import mock
+
import pytest
+try:
+ import grpc
+except ImportError: # pragma: NO COVER
+ pytest.skip("No GRPC", allow_module_level=True)
+
from google.api_core import exceptions
from google.api_core import grpc_helpers
import google.auth.credentials
@@ -52,6 +57,9 @@ def code(self):
def details(self):
return None
+ def trailing_metadata(self):
+ return None
+
def test_wrap_unary_errors():
grpc_error = RpcErrorImpl(grpc.StatusCode.INVALID_ARGUMENT)
@@ -66,6 +74,145 @@ def test_wrap_unary_errors():
assert exc_info.value.response == grpc_error
+class Test_StreamingResponseIterator:
+ @staticmethod
+ def _make_wrapped(*items):
+ return iter(items)
+
+ @staticmethod
+ def _make_one(wrapped, **kw):
+ return grpc_helpers._StreamingResponseIterator(wrapped, **kw)
+
+ def test_ctor_defaults(self):
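+ # Prefetching is on by default, so the first result is consumed eagerly.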
+ wrapped = self._make_wrapped("a", "b", "c")
+ iterator = self._make_one(wrapped)
+ assert iterator._stored_first_result == "a"
+ assert list(wrapped) == ["b", "c"]
+
+ def test_ctor_explicit(self):
+ wrapped = self._make_wrapped("a", "b", "c")
+ iterator = self._make_one(wrapped, prefetch_first_result=False)
+ assert getattr(iterator, "_stored_first_result", self) is self
+ assert list(wrapped) == ["a", "b", "c"]
+
+ def test_ctor_w_rpc_error_on_prefetch(self):
+ wrapped = mock.MagicMock()
+ wrapped.__next__.side_effect = grpc.RpcError()
+
+ with pytest.raises(grpc.RpcError):
+ self._make_one(wrapped)
+
+ def test___iter__(self):
+ wrapped = self._make_wrapped("a", "b", "c")
+ iterator = self._make_one(wrapped)
+ assert iter(iterator) is iterator
+
+ def test___next___w_cached_first_result(self):
+ wrapped = self._make_wrapped("a", "b", "c")
+ iterator = self._make_one(wrapped)
+ assert next(iterator) == "a"
+ iterator = self._make_one(wrapped, prefetch_first_result=False)
+ assert next(iterator) == "b"
+ assert next(iterator) == "c"
+
+ def test___next___wo_cached_first_result(self):
+ wrapped = self._make_wrapped("a", "b", "c")
+ iterator = self._make_one(wrapped, prefetch_first_result=False)
+ assert next(iterator) == "a"
+ assert next(iterator) == "b"
+ assert next(iterator) == "c"
+
+ def test___next___w_rpc_error(self):
+ wrapped = mock.MagicMock()
+ wrapped.__next__.side_effect = grpc.RpcError()
+ iterator = self._make_one(wrapped, prefetch_first_result=False)
+
+ with pytest.raises(exceptions.GoogleAPICallError):
+ next(iterator)
+
+ def test_add_callback(self):
+ wrapped = mock.MagicMock()
+ callback = mock.Mock(spec={})
+ iterator = self._make_one(wrapped, prefetch_first_result=False)
+
+ assert iterator.add_callback(callback) is wrapped.add_callback.return_value
+
+ wrapped.add_callback.assert_called_once_with(callback)
+
+ def test_cancel(self):
+ wrapped = mock.MagicMock()
+ iterator = self._make_one(wrapped, prefetch_first_result=False)
+
+ assert iterator.cancel() is wrapped.cancel.return_value
+
+ wrapped.cancel.assert_called_once_with()
+
+ def test_code(self):
+ wrapped = mock.MagicMock()
+ iterator = self._make_one(wrapped, prefetch_first_result=False)
+
+ assert iterator.code() is wrapped.code.return_value
+
+ wrapped.code.assert_called_once_with()
+
+ def test_details(self):
+ wrapped = mock.MagicMock()
+ iterator = self._make_one(wrapped, prefetch_first_result=False)
+
+ assert iterator.details() is wrapped.details.return_value
+
+ wrapped.details.assert_called_once_with()
+
+ def test_initial_metadata(self):
+ wrapped = mock.MagicMock()
+ iterator = self._make_one(wrapped, prefetch_first_result=False)
+
+ assert iterator.initial_metadata() is wrapped.initial_metadata.return_value
+
+ wrapped.initial_metadata.assert_called_once_with()
+
+ def test_is_active(self):
+ wrapped = mock.MagicMock()
+ iterator = self._make_one(wrapped, prefetch_first_result=False)
+
+ assert iterator.is_active() is wrapped.is_active.return_value
+
+ wrapped.is_active.assert_called_once_with()
+
+ def test_time_remaining(self):
+ wrapped = mock.MagicMock()
+ iterator = self._make_one(wrapped, prefetch_first_result=False)
+
+ assert iterator.time_remaining() is wrapped.time_remaining.return_value
+
+ wrapped.time_remaining.assert_called_once_with()
+
+ def test_trailing_metadata(self):
+ wrapped = mock.MagicMock()
+ iterator = self._make_one(wrapped, prefetch_first_result=False)
+
+ assert iterator.trailing_metadata() is wrapped.trailing_metadata.return_value
+
+ wrapped.trailing_metadata.assert_called_once_with()
+
+
+class TestGrpcStream(Test_StreamingResponseIterator):
+ @staticmethod
+ def _make_one(wrapped, **kw):
+ return grpc_helpers.GrpcStream(wrapped, **kw)
+
+ def test_grpc_stream_attributes(self):
+ """
+ Should be both a grpc.Call and an iterable
+ """
+ call = self._make_one(None)
+ assert isinstance(call, grpc.Call)
+ # should implement __iter__
+ assert hasattr(call, "__iter__")
+ it = call.__iter__()
+ assert hasattr(it, "__next__")
+
+
def test_wrap_stream_okay():
expected_responses = [1, 2, 3]
callable_ = mock.Mock(spec=["__call__"], return_value=iter(expected_responses))
@@ -93,7 +240,7 @@ def test_wrap_stream_prefetch_disabled():
callable_.assert_called_once_with(1, 2, three="four")
-def test_wrap_stream_iterable_iterface():
+def test_wrap_stream_iterable_interface():
response_iter = mock.create_autospec(grpc.Call, instance=True)
callable_ = mock.Mock(spec=["__call__"], return_value=response_iter)
@@ -219,54 +366,168 @@ def test_wrap_errors_streaming(wrap_stream_errors):
wrap_stream_errors.assert_called_once_with(callable_)
-@mock.patch("grpc.composite_channel_credentials")
+@pytest.mark.parametrize(
+ "attempt_direct_path,target,expected_target",
+ [
+ (None, "example.com:443", "example.com:443"),
+ (False, "example.com:443", "example.com:443"),
+ (True, "example.com:443", "google-c2p:///example.com"),
+ (True, "dns:///example.com", "google-c2p:///example.com"),
+ (True, "another-c2p:///example.com", "another-c2p:///example.com"),
+ ],
+)
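+# With attempt_direct_path=True, eligible targets are rewritten to the google-c2p resolver.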
+@mock.patch("grpc.compute_engine_channel_credentials")
@mock.patch(
"google.auth.default",
- return_value=(mock.sentinel.credentials, mock.sentinel.projet),
+ autospec=True,
+ return_value=(mock.sentinel.credentials, mock.sentinel.project),
)
@mock.patch("grpc.secure_channel")
-def test_create_channel_implicit(grpc_secure_channel, default, composite_creds_call):
- target = "example.com:443"
+def test_create_channel_implicit(
+ grpc_secure_channel,
+ google_auth_default,
+ composite_creds_call,
+ attempt_direct_path,
+ target,
+ expected_target,
+):
composite_creds = composite_creds_call.return_value
- channel = grpc_helpers.create_channel(target)
+ channel = grpc_helpers.create_channel(
+ target,
+ compression=grpc.Compression.Gzip,
+ attempt_direct_path=attempt_direct_path,
+ )
assert channel is grpc_secure_channel.return_value
- default.assert_called_once_with(scopes=None)
- if grpc_helpers.HAS_GRPC_GCP:
- grpc_secure_channel.assert_called_once_with(target, composite_creds, None)
+
+ google_auth_default.assert_called_once_with(scopes=None, default_scopes=None)
+
+ if grpc_helpers.HAS_GRPC_GCP: # pragma: NO COVER
+ # The original target is the expected target
+ expected_target = target
+ grpc_secure_channel.assert_called_once_with(
+ expected_target, composite_creds, None
+ )
+ else:
+ grpc_secure_channel.assert_called_once_with(
+ expected_target, composite_creds, compression=grpc.Compression.Gzip
+ )
+
+
+@pytest.mark.parametrize(
+ "attempt_direct_path,target, expected_target",
+ [
+ (None, "example.com:443", "example.com:443"),
+ (False, "example.com:443", "example.com:443"),
+ (True, "example.com:443", "google-c2p:///example.com"),
+ (True, "dns:///example.com", "google-c2p:///example.com"),
+ (True, "another-c2p:///example.com", "another-c2p:///example.com"),
+ ],
+)
+@mock.patch("google.auth.transport.grpc.AuthMetadataPlugin", autospec=True)
+@mock.patch(
+ "google.auth.transport.requests.Request",
+ autospec=True,
+ return_value=mock.sentinel.Request,
+)
+@mock.patch("grpc.compute_engine_channel_credentials")
+@mock.patch(
+ "google.auth.default",
+ autospec=True,
+ return_value=(mock.sentinel.credentials, mock.sentinel.project),
+)
+@mock.patch("grpc.secure_channel")
+def test_create_channel_implicit_with_default_host(
+ grpc_secure_channel,
+ google_auth_default,
+ composite_creds_call,
+ request,
+ auth_metadata_plugin,
+ attempt_direct_path,
+ target,
+ expected_target,
+):
+ default_host = "example.com"
+ composite_creds = composite_creds_call.return_value
+
+ channel = grpc_helpers.create_channel(
+ target, default_host=default_host, attempt_direct_path=attempt_direct_path
+ )
+
+ assert channel is grpc_secure_channel.return_value
+
+ google_auth_default.assert_called_once_with(scopes=None, default_scopes=None)
+ auth_metadata_plugin.assert_called_once_with(
+ mock.sentinel.credentials, mock.sentinel.Request, default_host=default_host
+ )
+
+ if grpc_helpers.HAS_GRPC_GCP: # pragma: NO COVER
+ # The original target is the expected target
+ expected_target = target
+ grpc_secure_channel.assert_called_once_with(
+ expected_target, composite_creds, None
+ )
else:
- grpc_secure_channel.assert_called_once_with(target, composite_creds)
+ grpc_secure_channel.assert_called_once_with(
+ expected_target, composite_creds, compression=None
+ )
+@pytest.mark.parametrize(
+ "attempt_direct_path",
+ [
+ None,
+ False,
+ ],
+)
@mock.patch("grpc.composite_channel_credentials")
@mock.patch(
"google.auth.default",
- return_value=(mock.sentinel.credentials, mock.sentinel.projet),
+ autospec=True,
+ return_value=(mock.sentinel.credentials, mock.sentinel.project),
)
@mock.patch("grpc.secure_channel")
def test_create_channel_implicit_with_ssl_creds(
- grpc_secure_channel, default, composite_creds_call
+ grpc_secure_channel, default, composite_creds_call, attempt_direct_path
):
target = "example.com:443"
ssl_creds = grpc.ssl_channel_credentials()
- grpc_helpers.create_channel(target, ssl_credentials=ssl_creds)
+ grpc_helpers.create_channel(
+ target, ssl_credentials=ssl_creds, attempt_direct_path=attempt_direct_path
+ )
+
+ default.assert_called_once_with(scopes=None, default_scopes=None)
- default.assert_called_once_with(scopes=None)
composite_creds_call.assert_called_once_with(ssl_creds, mock.ANY)
composite_creds = composite_creds_call.return_value
- if grpc_helpers.HAS_GRPC_GCP:
+
+ if grpc_helpers.HAS_GRPC_GCP: # pragma: NO COVER
grpc_secure_channel.assert_called_once_with(target, composite_creds, None)
else:
- grpc_secure_channel.assert_called_once_with(target, composite_creds)
+ grpc_secure_channel.assert_called_once_with(
+ target, composite_creds, compression=None
+ )
-@mock.patch("grpc.composite_channel_credentials")
+def test_create_channel_implicit_with_ssl_creds_attempt_direct_path_true():
+ target = "example.com:443"
+ ssl_creds = grpc.ssl_channel_credentials()
+ with pytest.raises(
+ ValueError, match="Using ssl_credentials with Direct Path is not supported"
+ ):
+ grpc_helpers.create_channel(
+ target, ssl_credentials=ssl_creds, attempt_direct_path=True
+ )
+
+
+@mock.patch("grpc.compute_engine_channel_credentials")
@mock.patch(
"google.auth.default",
- return_value=(mock.sentinel.credentials, mock.sentinel.projet),
+ autospec=True,
+ return_value=(mock.sentinel.credentials, mock.sentinel.project),
)
@mock.patch("grpc.secure_channel")
def test_create_channel_implicit_with_scopes(
@@ -278,11 +539,42 @@ def test_create_channel_implicit_with_scopes(
channel = grpc_helpers.create_channel(target, scopes=["one", "two"])
assert channel is grpc_secure_channel.return_value
- default.assert_called_once_with(scopes=["one", "two"])
- if grpc_helpers.HAS_GRPC_GCP:
+
+ default.assert_called_once_with(scopes=["one", "two"], default_scopes=None)
+
+ if grpc_helpers.HAS_GRPC_GCP: # pragma: NO COVER
grpc_secure_channel.assert_called_once_with(target, composite_creds, None)
else:
- grpc_secure_channel.assert_called_once_with(target, composite_creds)
+ grpc_secure_channel.assert_called_once_with(
+ target, composite_creds, compression=None
+ )
+
+
+@mock.patch("grpc.compute_engine_channel_credentials")
+@mock.patch(
+ "google.auth.default",
+ autospec=True,
+ return_value=(mock.sentinel.credentials, mock.sentinel.project),
+)
+@mock.patch("grpc.secure_channel")
+def test_create_channel_implicit_with_default_scopes(
+ grpc_secure_channel, default, composite_creds_call
+):
+ target = "example.com:443"
+ composite_creds = composite_creds_call.return_value
+
+ channel = grpc_helpers.create_channel(target, default_scopes=["three", "four"])
+
+ assert channel is grpc_secure_channel.return_value
+
+ default.assert_called_once_with(scopes=None, default_scopes=["three", "four"])
+
+ if grpc_helpers.HAS_GRPC_GCP: # pragma: NO COVER
+ grpc_secure_channel.assert_called_once_with(target, composite_creds, None)
+ else:
+ grpc_secure_channel.assert_called_once_with(
+ target, composite_creds, compression=None
+ )
def test_create_channel_explicit_with_duplicate_credentials():
@@ -292,12 +584,12 @@ def test_create_channel_explicit_with_duplicate_credentials():
grpc_helpers.create_channel(
target,
credentials_file="credentials.json",
- credentials=mock.sentinel.credentials
+ credentials=mock.sentinel.credentials,
)
-@mock.patch("grpc.composite_channel_credentials")
-@mock.patch("google.auth.credentials.with_scopes_if_required")
+@mock.patch("grpc.compute_engine_channel_credentials")
+@mock.patch("google.auth.credentials.with_scopes_if_required", autospec=True)
@mock.patch("grpc.secure_channel")
def test_create_channel_explicit(grpc_secure_channel, auth_creds, composite_creds_call):
target = "example.com:443"
@@ -305,15 +597,21 @@ def test_create_channel_explicit(grpc_secure_channel, auth_creds, composite_cred
channel = grpc_helpers.create_channel(target, credentials=mock.sentinel.credentials)
- auth_creds.assert_called_once_with(mock.sentinel.credentials, None)
+ auth_creds.assert_called_once_with(
+ mock.sentinel.credentials, scopes=None, default_scopes=None
+ )
+
assert channel is grpc_secure_channel.return_value
- if grpc_helpers.HAS_GRPC_GCP:
+
+ if grpc_helpers.HAS_GRPC_GCP: # pragma: NO COVER
grpc_secure_channel.assert_called_once_with(target, composite_creds, None)
else:
- grpc_secure_channel.assert_called_once_with(target, composite_creds)
+ grpc_secure_channel.assert_called_once_with(
+ target, composite_creds, compression=None
+ )
-@mock.patch("grpc.composite_channel_credentials")
+@mock.patch("grpc.compute_engine_channel_credentials")
@mock.patch("grpc.secure_channel")
def test_create_channel_explicit_scoped(grpc_secure_channel, composite_creds_call):
target = "example.com:443"
@@ -327,69 +625,117 @@ def test_create_channel_explicit_scoped(grpc_secure_channel, composite_creds_cal
target, credentials=credentials, scopes=scopes
)
- credentials.with_scopes.assert_called_once_with(scopes)
+ credentials.with_scopes.assert_called_once_with(scopes, default_scopes=None)
+
assert channel is grpc_secure_channel.return_value
- if grpc_helpers.HAS_GRPC_GCP:
+
+ if grpc_helpers.HAS_GRPC_GCP: # pragma: NO COVER
grpc_secure_channel.assert_called_once_with(target, composite_creds, None)
else:
- grpc_secure_channel.assert_called_once_with(target, composite_creds)
+ grpc_secure_channel.assert_called_once_with(
+ target, composite_creds, compression=None
+ )
-@mock.patch("grpc.composite_channel_credentials")
+@mock.patch("grpc.compute_engine_channel_credentials")
@mock.patch("grpc.secure_channel")
-def test_create_channel_explicit_with_quota_project(grpc_secure_channel, composite_creds_call):
+def test_create_channel_explicit_default_scopes(
+ grpc_secure_channel, composite_creds_call
+):
target = "example.com:443"
+ default_scopes = ["3", "4"]
composite_creds = composite_creds_call.return_value
- credentials = mock.create_autospec(google.auth.credentials.Credentials, instance=True)
+ credentials = mock.create_autospec(google.auth.credentials.Scoped, instance=True)
+ credentials.requires_scopes = True
channel = grpc_helpers.create_channel(
- target,
- credentials=credentials,
- quota_project_id="project-foo"
+ target, credentials=credentials, default_scopes=default_scopes
+ )
+
+ credentials.with_scopes.assert_called_once_with(
+ scopes=None, default_scopes=default_scopes
+ )
+
+ assert channel is grpc_secure_channel.return_value
+
+ if grpc_helpers.HAS_GRPC_GCP: # pragma: NO COVER
+ grpc_secure_channel.assert_called_once_with(target, composite_creds, None)
+ else:
+ grpc_secure_channel.assert_called_once_with(
+ target, composite_creds, compression=None
+ )
+
+
+@mock.patch("grpc.compute_engine_channel_credentials")
+@mock.patch("grpc.secure_channel")
+def test_create_channel_explicit_with_quota_project(
+ grpc_secure_channel, composite_creds_call
+):
+ target = "example.com:443"
+ composite_creds = composite_creds_call.return_value
+
+ credentials = mock.create_autospec(
+ google.auth.credentials.CredentialsWithQuotaProject, instance=True
+ )
+
+ channel = grpc_helpers.create_channel(
+ target, credentials=credentials, quota_project_id="project-foo"
)
credentials.with_quota_project.assert_called_once_with("project-foo")
assert channel is grpc_secure_channel.return_value
- if grpc_helpers.HAS_GRPC_GCP:
+
+ if grpc_helpers.HAS_GRPC_GCP: # pragma: NO COVER
grpc_secure_channel.assert_called_once_with(target, composite_creds, None)
else:
- grpc_secure_channel.assert_called_once_with(target, composite_creds)
+ grpc_secure_channel.assert_called_once_with(
+ target, composite_creds, compression=None
+ )
-@mock.patch("grpc.composite_channel_credentials")
+@mock.patch("grpc.compute_engine_channel_credentials")
@mock.patch("grpc.secure_channel")
@mock.patch(
"google.auth.load_credentials_from_file",
- return_value=(mock.sentinel.credentials, mock.sentinel.project)
+ autospec=True,
+ return_value=(mock.sentinel.credentials, mock.sentinel.project),
)
-def test_create_channel_with_credentials_file(load_credentials_from_file, grpc_secure_channel, composite_creds_call):
+def test_create_channel_with_credentials_file(
+ load_credentials_from_file, grpc_secure_channel, composite_creds_call
+):
target = "example.com:443"
credentials_file = "/path/to/credentials/file.json"
composite_creds = composite_creds_call.return_value
- channel = grpc_helpers.create_channel(
- target, credentials_file=credentials_file
- )
+ channel = grpc_helpers.create_channel(target, credentials_file=credentials_file)
- google.auth.load_credentials_from_file.assert_called_once_with(credentials_file, scopes=None)
+ google.auth.load_credentials_from_file.assert_called_once_with(
+ credentials_file, scopes=None, default_scopes=None
+ )
assert channel is grpc_secure_channel.return_value
- if grpc_helpers.HAS_GRPC_GCP:
+
+ if grpc_helpers.HAS_GRPC_GCP: # pragma: NO COVER
grpc_secure_channel.assert_called_once_with(target, composite_creds, None)
else:
- grpc_secure_channel.assert_called_once_with(target, composite_creds)
+ grpc_secure_channel.assert_called_once_with(
+ target, composite_creds, compression=None
+ )
-@mock.patch("grpc.composite_channel_credentials")
+@mock.patch("grpc.compute_engine_channel_credentials")
@mock.patch("grpc.secure_channel")
@mock.patch(
"google.auth.load_credentials_from_file",
- return_value=(mock.sentinel.credentials, mock.sentinel.project)
+ autospec=True,
+ return_value=(mock.sentinel.credentials, mock.sentinel.project),
)
-def test_create_channel_with_credentials_file_and_scopes(load_credentials_from_file, grpc_secure_channel, composite_creds_call):
+def test_create_channel_with_credentials_file_and_scopes(
+ load_credentials_from_file, grpc_secure_channel, composite_creds_call
+):
target = "example.com:443"
scopes = ["1", "2"]
@@ -400,19 +746,59 @@ def test_create_channel_with_credentials_file_and_scopes(load_credentials_from_f
target, credentials_file=credentials_file, scopes=scopes
)
- google.auth.load_credentials_from_file.assert_called_once_with(credentials_file, scopes=scopes)
+ google.auth.load_credentials_from_file.assert_called_once_with(
+ credentials_file, scopes=scopes, default_scopes=None
+ )
+
assert channel is grpc_secure_channel.return_value
- if grpc_helpers.HAS_GRPC_GCP:
+
+ if grpc_helpers.HAS_GRPC_GCP: # pragma: NO COVER
grpc_secure_channel.assert_called_once_with(target, composite_creds, None)
else:
- grpc_secure_channel.assert_called_once_with(target, composite_creds)
+ grpc_secure_channel.assert_called_once_with(
+ target, composite_creds, compression=None
+ )
+
+
+@mock.patch("grpc.compute_engine_channel_credentials")
+@mock.patch("grpc.secure_channel")
+@mock.patch(
+ "google.auth.load_credentials_from_file",
+ autospec=True,
+ return_value=(mock.sentinel.credentials, mock.sentinel.project),
+)
+def test_create_channel_with_credentials_file_and_default_scopes(
+ load_credentials_from_file, grpc_secure_channel, composite_creds_call
+):
+ target = "example.com:443"
+ default_scopes = ["3", "4"]
+
+ credentials_file = "/path/to/credentials/file.json"
+ composite_creds = composite_creds_call.return_value
+
+ channel = grpc_helpers.create_channel(
+ target, credentials_file=credentials_file, default_scopes=default_scopes
+ )
+
+ load_credentials_from_file.assert_called_once_with(
+ credentials_file, scopes=None, default_scopes=default_scopes
+ )
+
+ assert channel is grpc_secure_channel.return_value
+
+ if grpc_helpers.HAS_GRPC_GCP: # pragma: NO COVER
+ grpc_secure_channel.assert_called_once_with(target, composite_creds, None)
+ else:
+ grpc_secure_channel.assert_called_once_with(
+ target, composite_creds, compression=None
+ )
@pytest.mark.skipif(
not grpc_helpers.HAS_GRPC_GCP, reason="grpc_gcp module not available"
)
@mock.patch("grpc_gcp.secure_channel")
-def test_create_channel_with_grpc_gcp(grpc_gcp_secure_channel):
+def test_create_channel_with_grpc_gcp(grpc_gcp_secure_channel): # pragma: NO COVER
target = "example.com:443"
scopes = ["test_scope"]
@@ -421,7 +807,8 @@ def test_create_channel_with_grpc_gcp(grpc_gcp_secure_channel):
grpc_helpers.create_channel(target, credentials=credentials, scopes=scopes)
grpc_gcp_secure_channel.assert_called()
- credentials.with_scopes.assert_called_once_with(scopes)
+
+ credentials.with_scopes.assert_called_once_with(scopes, default_scopes=None)
@pytest.mark.skipif(grpc_helpers.HAS_GRPC_GCP, reason="grpc_gcp module is available")
@@ -435,7 +822,8 @@ def test_create_channel_without_grpc_gcp(grpc_secure_channel):
grpc_helpers.create_channel(target, credentials=credentials, scopes=scopes)
grpc_secure_channel.assert_called()
- credentials.with_scopes.assert_called_once_with(scopes)
+
+ credentials.with_scopes.assert_called_once_with(scopes, default_scopes=None)
class TestChannelStub(object):
@@ -533,6 +921,7 @@ def test_call_info(self):
stub = operations_pb2.OperationsStub(channel)
expected_request = operations_pb2.GetOperationRequest(name="meep")
expected_response = operations_pb2.Operation(name="moop")
+ expected_compression = grpc.Compression.NoCompression
expected_metadata = [("red", "blue"), ("two", "shoe")]
expected_credentials = mock.sentinel.credentials
channel.GetOperation.response = expected_response
@@ -540,6 +929,7 @@ def test_call_info(self):
response = stub.GetOperation(
expected_request,
timeout=42,
+ compression=expected_compression,
metadata=expected_metadata,
credentials=expected_credentials,
)
@@ -547,7 +937,13 @@ def test_call_info(self):
assert response == expected_response
assert channel.requests == [("GetOperation", expected_request)]
assert channel.GetOperation.calls == [
- (expected_request, 42, expected_metadata, expected_credentials)
+ (
+ expected_request,
+ 42,
+ expected_metadata,
+ expected_credentials,
+ expected_compression,
+ )
]
def test_unary_unary(self):
diff --git a/tests/unit/test_iam.py b/tests/unit/test_iam.py
index 896e10de..3de15288 100644
--- a/tests/unit/test_iam.py
+++ b/tests/unit/test_iam.py
@@ -55,6 +55,15 @@ def test___getitem___miss(self):
policy = self._make_one()
assert policy["nonesuch"] == set()
+ def test__getitem___and_set(self):
+ from google.api_core.iam import OWNER_ROLE
+
+ policy = self._make_one()
+
+ # get the policy using the getter and then modify it
+ policy[OWNER_ROLE].add("user:phred@example.com")
+ assert dict(policy) == {OWNER_ROLE: {"user:phred@example.com"}}
+
def test___getitem___version3(self):
policy = self._make_one("DEADBEEF", 3)
with pytest.raises(InvalidOperationException, match=_DICT_ACCESS_MSG):
@@ -112,7 +121,7 @@ def test___delitem___hit(self):
policy = self._make_one()
policy.bindings = [
{"role": "to/keep", "members": set(["phred@example.com"])},
- {"role": "to/remove", "members": set(["phred@example.com"])}
+ {"role": "to/remove", "members": set(["phred@example.com"])},
]
del policy["to/remove"]
assert len(policy) == 1
@@ -142,7 +151,9 @@ def test_bindings_property(self):
USER = "user:phred@example.com"
CONDITION = {"expression": "2 > 1"}
policy = self._make_one()
- BINDINGS = [{"role": "role/reader", "members": set([USER]), "condition": CONDITION}]
+ BINDINGS = [
+ {"role": "role/reader", "members": set([USER]), "condition": CONDITION}
+ ]
policy.bindings = BINDINGS
assert policy.bindings == BINDINGS
@@ -156,14 +167,15 @@ def test_owners_getter(self):
assert policy.owners == expected
def test_owners_setter(self):
- import warnings
from google.api_core.iam import OWNER_ROLE
MEMBER = "user:phred@example.com"
expected = set([MEMBER])
policy = self._make_one()
- with warnings.catch_warnings(record=True) as warned:
+ with pytest.warns(
+ DeprecationWarning, match="Assigning to 'owners' is deprecated."
+ ) as warned:
policy.owners = [MEMBER]
(warning,) = warned
@@ -180,14 +192,15 @@ def test_editors_getter(self):
assert policy.editors == expected
def test_editors_setter(self):
- import warnings
from google.api_core.iam import EDITOR_ROLE
MEMBER = "user:phred@example.com"
expected = set([MEMBER])
policy = self._make_one()
- with warnings.catch_warnings(record=True) as warned:
+ with pytest.warns(
+ DeprecationWarning, match="Assigning to 'editors' is deprecated."
+ ) as warned:
policy.editors = [MEMBER]
(warning,) = warned
@@ -204,14 +217,15 @@ def test_viewers_getter(self):
assert policy.viewers == expected
def test_viewers_setter(self):
- import warnings
from google.api_core.iam import VIEWER_ROLE
MEMBER = "user:phred@example.com"
expected = set([MEMBER])
policy = self._make_one()
- with warnings.catch_warnings(record=True) as warned:
+ with pytest.warns(
+ DeprecationWarning, match="Assigning to 'viewers' is deprecated."
+ ) as warned:
policy.viewers = [MEMBER]
(warning,) = warned
@@ -219,72 +233,36 @@ def test_viewers_setter(self):
assert policy[VIEWER_ROLE] == expected
def test_user(self):
- import warnings
-
EMAIL = "phred@example.com"
MEMBER = "user:%s" % (EMAIL,)
policy = self._make_one()
- with warnings.catch_warnings(record=True) as warned:
- assert policy.user(EMAIL) == MEMBER
-
- (warning,) = warned
- assert warning.category is DeprecationWarning
+ assert policy.user(EMAIL) == MEMBER
def test_service_account(self):
- import warnings
-
EMAIL = "phred@example.com"
MEMBER = "serviceAccount:%s" % (EMAIL,)
policy = self._make_one()
- with warnings.catch_warnings(record=True) as warned:
- assert policy.service_account(EMAIL) == MEMBER
-
- (warning,) = warned
- assert warning.category is DeprecationWarning
+ assert policy.service_account(EMAIL) == MEMBER
def test_group(self):
- import warnings
-
EMAIL = "phred@example.com"
MEMBER = "group:%s" % (EMAIL,)
policy = self._make_one()
- with warnings.catch_warnings(record=True) as warned:
- assert policy.group(EMAIL) == MEMBER
-
- (warning,) = warned
- assert warning.category is DeprecationWarning
+ assert policy.group(EMAIL) == MEMBER
def test_domain(self):
- import warnings
-
DOMAIN = "example.com"
MEMBER = "domain:%s" % (DOMAIN,)
policy = self._make_one()
- with warnings.catch_warnings(record=True) as warned:
- assert policy.domain(DOMAIN) == MEMBER
-
- (warning,) = warned
- assert warning.category is DeprecationWarning
+ assert policy.domain(DOMAIN) == MEMBER
def test_all_users(self):
- import warnings
-
policy = self._make_one()
- with warnings.catch_warnings(record=True) as warned:
- assert policy.all_users() == "allUsers"
-
- (warning,) = warned
- assert warning.category is DeprecationWarning
+ assert policy.all_users() == "allUsers"
def test_authenticated_users(self):
- import warnings
-
policy = self._make_one()
- with warnings.catch_warnings(record=True) as warned:
- assert policy.authenticated_users() == "allAuthenticatedUsers"
-
- (warning,) = warned
- assert warning.category is DeprecationWarning
+ assert policy.authenticated_users() == "allAuthenticatedUsers"
def test_from_api_repr_only_etag(self):
empty = frozenset()
@@ -362,12 +340,13 @@ def test_to_api_repr_binding_wo_members(self):
assert policy.to_api_repr() == {}
def test_to_api_repr_binding_w_duplicates(self):
- import warnings
from google.api_core.iam import OWNER_ROLE
OWNER = "group:cloud-logs@google.com"
policy = self._make_one()
- with warnings.catch_warnings(record=True):
+ with pytest.warns(
+ DeprecationWarning, match="Assigning to 'owners' is deprecated."
+ ):
policy.owners = [OWNER, OWNER]
assert policy.to_api_repr() == {
"bindings": [{"role": OWNER_ROLE, "members": [OWNER]}]
@@ -386,13 +365,17 @@ def test_to_api_repr_full(self):
CONDITION = {
"title": "title",
"description": "description",
- "expression": "true"
+ "expression": "true",
}
BINDINGS = [
{"role": OWNER_ROLE, "members": [OWNER1, OWNER2]},
{"role": EDITOR_ROLE, "members": [EDITOR1, EDITOR2]},
{"role": VIEWER_ROLE, "members": [VIEWER1, VIEWER2]},
- {"role": VIEWER_ROLE, "members": [VIEWER1, VIEWER2], "condition": CONDITION},
+ {
+ "role": VIEWER_ROLE,
+ "members": [VIEWER1, VIEWER2],
+ "condition": CONDITION,
+ },
]
policy = self._make_one("DEADBEEF", 1)
policy.bindings = BINDINGS
diff --git a/tests/unit/test_operation.py b/tests/unit/test_operation.py
index 14b95cbb..80680720 100644
--- a/tests/unit/test_operation.py
+++ b/tests/unit/test_operation.py
@@ -13,7 +13,14 @@
# limitations under the License.
-import mock
+from unittest import mock
+
+import pytest
+
+try:
+ import grpc # noqa: F401
+except ImportError: # pragma: NO COVER
+ pytest.skip("No GRPC", allow_module_level=True)
from google.api_core import exceptions
from google.api_core import operation
@@ -146,6 +153,23 @@ def test_exception():
assert expected_exception.message in "{!r}".format(exception)
+def test_exception_with_error_code():
+ expected_exception = status_pb2.Status(message="meep", code=5)
+ responses = [
+ make_operation_proto(),
+ # Second operation response includes the error.
+ make_operation_proto(done=True, error=expected_exception),
+ ]
+ future, _, _ = make_operation_future(responses)
+
+ exception = future.exception()
+
+ assert expected_exception.message in "{!r}".format(exception)
+ # Status Code 5 maps to Not Found
+ # https://developers.google.com/maps-booking/reference/grpc-api/status_codes
+ assert isinstance(exception, exceptions.NotFound)
+
+
def test_unexpected_result():
responses = [
make_operation_proto(),
@@ -160,17 +184,39 @@ def test_unexpected_result():
def test__refresh_http():
- api_request = mock.Mock(return_value={"name": TEST_OPERATION_NAME, "done": True})
+ json_response = {"name": TEST_OPERATION_NAME, "done": True}
+ api_request = mock.Mock(return_value=json_response)
result = operation._refresh_http(api_request, TEST_OPERATION_NAME)
+ assert isinstance(result, operations_pb2.Operation)
assert result.name == TEST_OPERATION_NAME
assert result.done is True
+
api_request.assert_called_once_with(
method="GET", path="operations/{}".format(TEST_OPERATION_NAME)
)
+def test__refresh_http_w_retry():
+ json_response = {"name": TEST_OPERATION_NAME, "done": True}
+ api_request = mock.Mock()
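+ # retry(api_request) must return a wrapped callable; invoking that wrapper yields the JSON payload.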
+ retry = mock.Mock()
+ retry.return_value.return_value = json_response
+
+ result = operation._refresh_http(api_request, TEST_OPERATION_NAME, retry=retry)
+
+ assert isinstance(result, operations_pb2.Operation)
+ assert result.name == TEST_OPERATION_NAME
+ assert result.done is True
+
+ api_request.assert_not_called()
+ retry.assert_called_once_with(api_request)
+ retry.return_value.assert_called_once_with(
+ method="GET", path="operations/{}".format(TEST_OPERATION_NAME)
+ )
+
+
def test__cancel_http():
api_request = mock.Mock()
@@ -207,6 +253,21 @@ def test__refresh_grpc():
operations_stub.GetOperation.assert_called_once_with(expected_request)
+def test__refresh_grpc_w_retry():
+ operations_stub = mock.Mock(spec=["GetOperation"])
+ expected_result = make_operation_proto(done=True)
+ retry = mock.Mock()
+ retry.return_value.return_value = expected_result
+
+ result = operation._refresh_grpc(operations_stub, TEST_OPERATION_NAME, retry=retry)
+
+ assert result == expected_result
+ expected_request = operations_pb2.GetOperationRequest(name=TEST_OPERATION_NAME)
+ operations_stub.GetOperation.assert_not_called()
+ retry.assert_called_once_with(operations_stub.GetOperation)
+ retry.return_value.assert_called_once_with(expected_request)
+
+
def test__cancel_grpc():
operations_stub = mock.Mock(spec=["CancelOperation"])
@@ -225,12 +286,15 @@ def test_from_grpc():
operations_stub,
struct_pb2.Struct,
metadata_type=struct_pb2.Struct,
+ grpc_metadata=[("x-goog-request-params", "foo")],
)
assert future._result_type == struct_pb2.Struct
assert future._metadata_type == struct_pb2.Struct
assert future.operation.name == TEST_OPERATION_NAME
assert future.done
+ assert future._refresh.keywords["metadata"] == [("x-goog-request-params", "foo")]
+ assert future._cancel.keywords["metadata"] == [("x-goog-request-params", "foo")]
def test_from_gapic():
@@ -244,12 +308,15 @@ def test_from_gapic():
operations_client,
struct_pb2.Struct,
metadata_type=struct_pb2.Struct,
+ grpc_metadata=[("x-goog-request-params", "foo")],
)
assert future._result_type == struct_pb2.Struct
assert future._metadata_type == struct_pb2.Struct
assert future.operation.name == TEST_OPERATION_NAME
assert future.done
+ assert future._refresh.keywords["metadata"] == [("x-goog-request-params", "foo")]
+ assert future._cancel.keywords["metadata"] == [("x-goog-request-params", "foo")]
def test_deserialize():
diff --git a/tests/unit/test_packaging.py b/tests/unit/test_packaging.py
new file mode 100644
index 00000000..8100a496
--- /dev/null
+++ b/tests/unit/test_packaging.py
@@ -0,0 +1,28 @@
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import subprocess
+import sys
+
+
+def test_namespace_package_compat(tmp_path):
+ # The ``google`` namespace package should not be masked
+ # by the presence of ``google-api-core``.
+ google = tmp_path / "google"
+ google.mkdir()
+ google.joinpath("othermod.py").write_text("")
+ env = dict(os.environ, PYTHONPATH=str(tmp_path))
+ cmd = [sys.executable, "-m", "google.othermod"]
+ subprocess.check_call(cmd, env=env)
diff --git a/tests/unit/test_page_iterator.py b/tests/unit/test_page_iterator.py
index 2bf74249..560722c5 100644
--- a/tests/unit/test_page_iterator.py
+++ b/tests/unit/test_page_iterator.py
@@ -12,11 +12,11 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import math
import types
+from unittest import mock
-import mock
import pytest
-import six
from google.api_core import page_iterator
@@ -55,17 +55,17 @@ def test_iterator_calls_parent_item_to_value(self):
assert item_to_value.call_count == 0
assert page.remaining == 100
- assert six.next(page) == 10
+ assert next(page) == 10
assert item_to_value.call_count == 1
item_to_value.assert_called_with(parent, 10)
assert page.remaining == 99
- assert six.next(page) == 11
+ assert next(page) == 11
assert item_to_value.call_count == 2
item_to_value.assert_called_with(parent, 11)
assert page.remaining == 98
- assert six.next(page) == 12
+ assert next(page) == 12
assert item_to_value.call_count == 3
item_to_value.assert_called_with(parent, 12)
assert page.remaining == 97
@@ -108,6 +108,26 @@ def test_constructor(self):
assert iterator.next_page_token == token
assert iterator.num_results == 0
+ def test_next(self):
+ iterator = PageIteratorImpl(None, None)
+ page_1 = page_iterator.Page(
+ iterator, ("item 1.1", "item 1.2"), page_iterator._item_to_value_identity
+ )
+ page_2 = page_iterator.Page(
+ iterator, ("item 2.1",), page_iterator._item_to_value_identity
+ )
+ iterator._next_page = mock.Mock(side_effect=[page_1, page_2, None])
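+ # A None page from _next_page signals that iteration is exhausted.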
+
+ result = next(iterator)
+ assert result == "item 1.1"
+ result = next(iterator)
+ assert result == "item 1.2"
+ result = next(iterator)
+ assert result == "item 2.1"
+
+ with pytest.raises(StopIteration):
+ next(iterator)
+
def test_pages_property_starts(self):
iterator = PageIteratorImpl(None, None)
@@ -129,7 +149,8 @@ def test_pages_property_restart(self):
def test__page_iter_increment(self):
iterator = PageIteratorImpl(None, None)
page = page_iterator.Page(
- iterator, ("item",), page_iterator._item_to_value_identity)
+ iterator, ("item",), page_iterator._item_to_value_identity
+ )
iterator._next_page = mock.Mock(side_effect=[page, None])
assert iterator.num_results == 0
@@ -159,9 +180,11 @@ def test__items_iter(self):
# Make pages from mock responses
parent = mock.sentinel.parent
page1 = page_iterator.Page(
- parent, (item1, item2), page_iterator._item_to_value_identity)
+ parent, (item1, item2), page_iterator._item_to_value_identity
+ )
page2 = page_iterator.Page(
- parent, (item3,), page_iterator._item_to_value_identity)
+ parent, (item3,), page_iterator._item_to_value_identity
+ )
iterator = PageIteratorImpl(None, None)
iterator._next_page = mock.Mock(side_effect=[page1, page2, None])
@@ -173,17 +196,17 @@ def test__items_iter(self):
# Consume items and check the state of the iterator.
assert iterator.num_results == 0
- assert six.next(items_iter) == item1
+ assert next(items_iter) == item1
assert iterator.num_results == 1
- assert six.next(items_iter) == item2
+ assert next(items_iter) == item2
assert iterator.num_results == 2
- assert six.next(items_iter) == item3
+ assert next(items_iter) == item3
assert iterator.num_results == 3
with pytest.raises(StopIteration):
- six.next(items_iter)
+ next(items_iter)
def test___iter__(self):
iterator = PageIteratorImpl(None, None)
@@ -235,6 +258,7 @@ def test_constructor(self):
assert iterator.page_number == 0
assert iterator.next_page_token is None
assert iterator.num_results == 0
+ assert iterator._page_size is None
def test_constructor_w_extra_param_collision(self):
extra_params = {"pageToken": "val"}
@@ -264,16 +288,16 @@ def test_iterate(self):
items_iter = iter(iterator)
- val1 = six.next(items_iter)
+ val1 = next(items_iter)
assert val1 == item1
assert iterator.num_results == 1
- val2 = six.next(items_iter)
+ val2 = next(items_iter)
assert val2 == item2
assert iterator.num_results == 2
with pytest.raises(StopIteration):
- six.next(items_iter)
+ next(items_iter)
api_request.assert_called_once_with(method="GET", path=path, query_params={})
@@ -432,6 +456,68 @@ def test__get_next_page_bad_http_method(self):
with pytest.raises(ValueError):
iterator._get_next_page_response()
+ @pytest.mark.parametrize(
+ "page_size,max_results,pages",
+ [(3, None, False), (3, 8, False), (3, None, True), (3, 8, True)],
+ )
+ def test_page_size_items(self, page_size, max_results, pages):
+ path = "/foo"
+ NITEMS = 10
+
+ n = [0]  # mutable cell so the nested api_request can update the item count
+
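+ # Fake transport: serves NITEMS items in pages of at most maxResults,
+ # returning a nextPageToken until the collection is exhausted.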
+ def api_request(*args, **kw):
+ assert not args
+ query_params = dict(
+ maxResults=(
+ page_size
+ if max_results is None
+ else min(page_size, max_results - n[0])
+ )
+ )
+ if n[0]:
+ query_params.update(pageToken="test")
+ assert kw == {"method": "GET", "path": "/foo", "query_params": query_params}
+ n_items = min(kw["query_params"]["maxResults"], NITEMS - n[0])
+ items = [dict(name=str(i + n[0])) for i in range(n_items)]
+ n[0] += n_items
+ result = dict(items=items)
+ if n[0] < NITEMS:
+ result.update(nextPageToken="test")
+ return result
+
+ iterator = page_iterator.HTTPIterator(
+ mock.sentinel.client,
+ api_request,
+ path=path,
+ item_to_value=page_iterator._item_to_value_identity,
+ page_size=page_size,
+ max_results=max_results,
+ )
+
+ assert iterator.num_results == 0
+
+ n_results = max_results if max_results is not None else NITEMS
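+ # Drive the iterator page-by-page or item-by-item, depending on `pages`,
+ # and check that each slice of results arrives in order.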
+ if pages:
+ items_iter = iter(iterator.pages)
+ npages = int(math.ceil(float(n_results) / page_size))
+ for ipage in range(npages):
+ assert list(next(items_iter)) == [
+ dict(name=str(i))
+ for i in range(
+ ipage * page_size,
+ min((ipage + 1) * page_size, n_results),
+ )
+ ]
+ else:
+ items_iter = iter(iterator)
+ for i in range(n_results):
+ assert next(items_iter) == dict(name=str(i))
+ assert iterator.num_results == i + 1
+
+ with pytest.raises(StopIteration):
+ next(items_iter)
+
class TestGRPCIterator(object):
def test_constructor(self):
@@ -535,7 +621,7 @@ def __init__(self, pages, page_token=None):
self.page_token = page_token
def next(self):
- return six.next(self._pages)
+ return next(self._pages)
__next__ = next
diff --git a/tests/unit/test_path_template.py b/tests/unit/test_path_template.py
index 4c8a7c5e..c34dd0f3 100644
--- a/tests/unit/test_path_template.py
+++ b/tests/unit/test_path_template.py
@@ -13,10 +13,11 @@
# limitations under the License.
from __future__ import unicode_literals
+from unittest import mock
-import mock
import pytest
+from google.api import auth_pb2
from google.api_core import path_template
@@ -84,6 +85,61 @@ def test_expanded_failure(tmpl, args, kwargs, exc_match):
path_template.expand(tmpl, *args, **kwargs)
+@pytest.mark.parametrize(
+ "request_obj, field, expected_result",
+ [
+ [{"field": "stringValue"}, "field", "stringValue"],
+ [{"field": "stringValue"}, "nosuchfield", None],
+ [{"field": "stringValue"}, "field.subfield", None],
+ [{"field": {"subfield": "stringValue"}}, "field", None],
+ [{"field": {"subfield": "stringValue"}}, "field.subfield", "stringValue"],
+ [{"field": {"subfield": [1, 2, 3]}}, "field.subfield", [1, 2, 3]],
+ [{"field": {"subfield": "stringValue"}}, "field", None],
+ [{"field": {"subfield": "stringValue"}}, "field.nosuchfield", None],
+ [
+ {"field": {"subfield": {"subsubfield": "stringValue"}}},
+ "field.subfield.subsubfield",
+ "stringValue",
+ ],
+ ["string", "field", None],
+ ],
+)
+def test_get_field(request_obj, field, expected_result):
+ result = path_template.get_field(request_obj, field)
+ assert result == expected_result
+
+
+@pytest.mark.parametrize(
+ "request_obj, field, expected_result",
+ [
+ [{"field": "stringValue"}, "field", {}],
+ [{"field": "stringValue"}, "nosuchfield", {"field": "stringValue"}],
+ [{"field": "stringValue"}, "field.subfield", {"field": "stringValue"}],
+ [{"field": {"subfield": "stringValue"}}, "field.subfield", {"field": {}}],
+ [
+ {"field": {"subfield": "stringValue", "q": "w"}, "e": "f"},
+ "field.subfield",
+ {"field": {"q": "w"}, "e": "f"},
+ ],
+ [
+ {"field": {"subfield": "stringValue"}},
+ "field.nosuchfield",
+ {"field": {"subfield": "stringValue"}},
+ ],
+ [
+ {"field": {"subfield": {"subsubfield": "stringValue", "q": "w"}}},
+ "field.subfield.subsubfield",
+ {"field": {"subfield": {"q": "w"}}},
+ ],
+ ["string", "field", "string"],
+ ["string", "field.subfield", "string"],
+ ],
+)
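+# delete_field mutates the request in place, so the test asserts against
+# the original object rather than a return value.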
+def test_delete_field(request_obj, field, expected_result):
+ path_template.delete_field(request_obj, field)
+ assert request_obj == expected_result
+
+
@pytest.mark.parametrize(
"tmpl, path",
[
@@ -113,3 +169,484 @@ def test__replace_variable_with_pattern():
match.group.return_value = None
with pytest.raises(ValueError, match="Unknown"):
path_template._replace_variable_with_pattern(match)
+
+
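+# Each case below is [http_options, message, request_kwargs, expected_result];
+# helper_test_transcode at the end of this module expands the compact lists.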
+@pytest.mark.parametrize(
+ "http_options, message, request_kwargs, expected_result",
+ [
+ [
+ [["get", "/v1/no/template", ""]],
+ None,
+ {"foo": "bar"},
+ ["get", "/v1/no/template", {}, {"foo": "bar"}],
+ ],
+ [
+ [["get", "/v1/no/template", ""]],
+ auth_pb2.AuthenticationRule(selector="bar"),
+ {},
+ [
+ "get",
+ "/v1/no/template",
+ None,
+ auth_pb2.AuthenticationRule(selector="bar"),
+ ],
+ ],
+ # Single templates
+ [
+ [["get", "/v1/{field}", ""]],
+ None,
+ {"field": "parent"},
+ ["get", "/v1/parent", {}, {}],
+ ],
+ [
+ [["get", "/v1/{selector}", ""]],
+ auth_pb2.AuthenticationRule(selector="parent"),
+ {},
+ ["get", "/v1/parent", None, auth_pb2.AuthenticationRule()],
+ ],
+ [
+ [["get", "/v1/{field.sub}", ""]],
+ None,
+ {"field": {"sub": "parent"}, "foo": "bar"},
+ ["get", "/v1/parent", {}, {"field": {}, "foo": "bar"}],
+ ],
+ [
+ [["get", "/v1/{oauth.canonical_scopes}", ""]],
+ auth_pb2.AuthenticationRule(
+ selector="bar",
+ oauth=auth_pb2.OAuthRequirements(canonical_scopes="parent"),
+ ),
+ {},
+ [
+ "get",
+ "/v1/parent",
+ None,
+ auth_pb2.AuthenticationRule(
+ selector="bar", oauth=auth_pb2.OAuthRequirements()
+ ),
+ ],
+ ],
+ ],
+)
+def test_transcode_base_case(http_options, message, request_kwargs, expected_result):
+ http_options, expected_result = helper_test_transcode(http_options, expected_result)
+ result = path_template.transcode(http_options, message, **request_kwargs)
+ assert result == expected_result
+
+
+@pytest.mark.parametrize(
+ "http_options, message, request_kwargs, expected_result",
+ [
+ [
+ [["get", "/v1/{field.subfield}", ""]],
+ None,
+ {"field": {"subfield": "parent"}, "foo": "bar"},
+ ["get", "/v1/parent", {}, {"field": {}, "foo": "bar"}],
+ ],
+ [
+ [["get", "/v1/{oauth.canonical_scopes}", ""]],
+ auth_pb2.AuthenticationRule(
+ selector="bar",
+ oauth=auth_pb2.OAuthRequirements(canonical_scopes="parent"),
+ ),
+ {},
+ [
+ "get",
+ "/v1/parent",
+ None,
+ auth_pb2.AuthenticationRule(
+ selector="bar", oauth=auth_pb2.OAuthRequirements()
+ ),
+ ],
+ ],
+ [
+ [["get", "/v1/{field.subfield.subsubfield}", ""]],
+ None,
+ {"field": {"subfield": {"subsubfield": "parent"}}, "foo": "bar"},
+ ["get", "/v1/parent", {}, {"field": {"subfield": {}}, "foo": "bar"}],
+ ],
+ [
+ [["get", "/v1/{field.subfield1}/{field.subfield2}", ""]],
+ None,
+ {"field": {"subfield1": "parent", "subfield2": "child"}, "foo": "bar"},
+ ["get", "/v1/parent/child", {}, {"field": {}, "foo": "bar"}],
+ ],
+ [
+ [["get", "/v1/{selector}/{oauth.canonical_scopes}", ""]],
+ auth_pb2.AuthenticationRule(
+ selector="parent",
+ oauth=auth_pb2.OAuthRequirements(canonical_scopes="child"),
+ ),
+ {"field": {"subfield1": "parent", "subfield2": "child"}, "foo": "bar"},
+ [
+ "get",
+ "/v1/parent/child",
+ None,
+ auth_pb2.AuthenticationRule(oauth=auth_pb2.OAuthRequirements()),
+ ],
+ ],
+ ],
+)
+def test_transcode_subfields(http_options, message, request_kwargs, expected_result):
+ http_options, expected_result = helper_test_transcode(http_options, expected_result)
+ result = path_template.transcode(http_options, message, **request_kwargs)
+ assert result == expected_result
+
+
+@pytest.mark.parametrize(
+ "http_options, message, request_kwargs, expected_result",
+ [
+ # Single segment wildcard
+ [
+ [["get", "/v1/{field=*}", ""]],
+ None,
+ {"field": "parent"},
+ ["get", "/v1/parent", {}, {}],
+ ],
+ [
+ [["get", "/v1/{selector=*}", ""]],
+ auth_pb2.AuthenticationRule(selector="parent"),
+ {},
+ ["get", "/v1/parent", None, auth_pb2.AuthenticationRule()],
+ ],
+ [
+ [["get", "/v1/{field=a/*/b/*}", ""]],
+ None,
+ {"field": "a/parent/b/child", "foo": "bar"},
+ ["get", "/v1/a/parent/b/child", {}, {"foo": "bar"}],
+ ],
+ [
+ [["get", "/v1/{selector=a/*/b/*}", ""]],
+ auth_pb2.AuthenticationRule(
+ selector="a/parent/b/child", allow_without_credential=True
+ ),
+ {},
+ [
+ "get",
+ "/v1/a/parent/b/child",
+ None,
+ auth_pb2.AuthenticationRule(allow_without_credential=True),
+ ],
+ ],
+ # Double segment wildcard
+ [
+ [["get", "/v1/{field=**}", ""]],
+ None,
+ {"field": "parent/p1"},
+ ["get", "/v1/parent/p1", {}, {}],
+ ],
+ [
+ [["get", "/v1/{selector=**}", ""]],
+ auth_pb2.AuthenticationRule(selector="parent/p1"),
+ {},
+ ["get", "/v1/parent/p1", None, auth_pb2.AuthenticationRule()],
+ ],
+ [
+ [["get", "/v1/{field=a/**/b/**}", ""]],
+ None,
+ {"field": "a/parent/p1/b/child/c1", "foo": "bar"},
+ ["get", "/v1/a/parent/p1/b/child/c1", {}, {"foo": "bar"}],
+ ],
+ [
+ [["get", "/v1/{selector=a/**/b/**}", ""]],
+ auth_pb2.AuthenticationRule(
+ selector="a/parent/p1/b/child/c1", allow_without_credential=True
+ ),
+ {},
+ [
+ "get",
+ "/v1/a/parent/p1/b/child/c1",
+ None,
+ auth_pb2.AuthenticationRule(allow_without_credential=True),
+ ],
+ ],
+ # Combined single and double segment wildcard
+ [
+ [["get", "/v1/{field=a/*/b/**}", ""]],
+ None,
+ {"field": "a/parent/b/child/c1"},
+ ["get", "/v1/a/parent/b/child/c1", {}, {}],
+ ],
+ [
+ [["get", "/v1/{selector=a/*/b/**}", ""]],
+ auth_pb2.AuthenticationRule(selector="a/parent/b/child/c1"),
+ {},
+ ["get", "/v1/a/parent/b/child/c1", None, auth_pb2.AuthenticationRule()],
+ ],
+ [
+ [["get", "/v1/{field=a/**/b/*}/v2/{name}", ""]],
+ None,
+ {"field": "a/parent/p1/b/child", "name": "first", "foo": "bar"},
+ ["get", "/v1/a/parent/p1/b/child/v2/first", {}, {"foo": "bar"}],
+ ],
+ [
+ [["get", "/v1/{selector=a/**/b/*}/v2/{oauth.canonical_scopes}", ""]],
+ auth_pb2.AuthenticationRule(
+ selector="a/parent/p1/b/child",
+ oauth=auth_pb2.OAuthRequirements(canonical_scopes="first"),
+ ),
+ {"field": "a/parent/p1/b/child", "name": "first", "foo": "bar"},
+ [
+ "get",
+ "/v1/a/parent/p1/b/child/v2/first",
+ None,
+ auth_pb2.AuthenticationRule(oauth=auth_pb2.OAuthRequirements()),
+ ],
+ ],
+ ],
+)
+def test_transcode_with_wildcard(
+ http_options, message, request_kwargs, expected_result
+):
+ http_options, expected_result = helper_test_transcode(http_options, expected_result)
+ result = path_template.transcode(http_options, message, **request_kwargs)
+ assert result == expected_result
+
+
+@pytest.mark.parametrize(
+ "http_options, message, request_kwargs, expected_result",
+ [
+ # Single field body
+ [
+ [["post", "/v1/no/template", "data"]],
+ None,
+ {"data": {"id": 1, "info": "some info"}, "foo": "bar"},
+ ["post", "/v1/no/template", {"id": 1, "info": "some info"}, {"foo": "bar"}],
+ ],
+ [
+ [["post", "/v1/no/template", "oauth"]],
+ auth_pb2.AuthenticationRule(
+ selector="bar",
+ oauth=auth_pb2.OAuthRequirements(canonical_scopes="child"),
+ ),
+ {},
+ [
+ "post",
+ "/v1/no/template",
+ auth_pb2.OAuthRequirements(canonical_scopes="child"),
+ auth_pb2.AuthenticationRule(selector="bar"),
+ ],
+ ],
+ [
+ [["post", "/v1/{field=a/*}/b/{name=**}", "data"]],
+ None,
+ {
+ "field": "a/parent",
+ "name": "first/last",
+ "data": {"id": 1, "info": "some info"},
+ "foo": "bar",
+ },
+ [
+ "post",
+ "/v1/a/parent/b/first/last",
+ {"id": 1, "info": "some info"},
+ {"foo": "bar"},
+ ],
+ ],
+ [
+ [["post", "/v1/{selector=a/*}/b/{oauth.canonical_scopes=**}", "oauth"]],
+ auth_pb2.AuthenticationRule(
+ selector="a/parent",
+ allow_without_credential=True,
+ requirements=[auth_pb2.AuthRequirement(provider_id="p")],
+ oauth=auth_pb2.OAuthRequirements(canonical_scopes="first/last"),
+ ),
+ {},
+ [
+ "post",
+ "/v1/a/parent/b/first/last",
+ auth_pb2.OAuthRequirements(),
+ auth_pb2.AuthenticationRule(
+ requirements=[auth_pb2.AuthRequirement(provider_id="p")],
+ allow_without_credential=True,
+ ),
+ ],
+ ],
+ # Wildcard body
+ [
+ [["post", "/v1/{field=a/*}/b/{name=**}", "*"]],
+ None,
+ {
+ "field": "a/parent",
+ "name": "first/last",
+ "data": {"id": 1, "info": "some info"},
+ "foo": "bar",
+ },
+ [
+ "post",
+ "/v1/a/parent/b/first/last",
+ {"data": {"id": 1, "info": "some info"}, "foo": "bar"},
+ {},
+ ],
+ ],
+ [
+ [["post", "/v1/{selector=a/*}/b/{oauth.canonical_scopes=**}", "*"]],
+ auth_pb2.AuthenticationRule(
+ selector="a/parent",
+ allow_without_credential=True,
+ oauth=auth_pb2.OAuthRequirements(canonical_scopes="first/last"),
+ ),
+ {
+ "field": "a/parent",
+ "name": "first/last",
+ "data": {"id": 1, "info": "some info"},
+ "foo": "bar",
+ },
+ [
+ "post",
+ "/v1/a/parent/b/first/last",
+ auth_pb2.AuthenticationRule(
+ allow_without_credential=True, oauth=auth_pb2.OAuthRequirements()
+ ),
+ auth_pb2.AuthenticationRule(),
+ ],
+ ],
+ ],
+)
+def test_transcode_with_body(http_options, message, request_kwargs, expected_result):
+ http_options, expected_result = helper_test_transcode(http_options, expected_result)
+ result = path_template.transcode(http_options, message, **request_kwargs)
+ assert result == expected_result
+
+
+@pytest.mark.parametrize(
+ "http_options, message, request_kwargs, expected_result",
+ [
+ # Additional bindings
+ [
+ [
+ ["post", "/v1/{field=a/*}/b/{name=**}", "extra_data"],
+ ["post", "/v1/{field=a/*}/b/{name=**}", "*"],
+ ],
+ None,
+ {
+ "field": "a/parent",
+ "name": "first/last",
+ "data": {"id": 1, "info": "some info"},
+ "foo": "bar",
+ },
+ [
+ "post",
+ "/v1/a/parent/b/first/last",
+ {"data": {"id": 1, "info": "some info"}, "foo": "bar"},
+ {},
+ ],
+ ],
+ [
+ [
+ [
+ "post",
+ "/v1/{selector=a/*}/b/{oauth.canonical_scopes=**}",
+ "extra_data",
+ ],
+ ["post", "/v1/{selector=a/*}/b/{oauth.canonical_scopes=**}", "*"],
+ ],
+ auth_pb2.AuthenticationRule(
+ selector="a/parent",
+ allow_without_credential=True,
+ oauth=auth_pb2.OAuthRequirements(canonical_scopes="first/last"),
+ ),
+ {},
+ [
+ "post",
+ "/v1/a/parent/b/first/last",
+ auth_pb2.AuthenticationRule(
+ allow_without_credential=True, oauth=auth_pb2.OAuthRequirements()
+ ),
+ auth_pb2.AuthenticationRule(),
+ ],
+ ],
+ [
+ [
+ ["get", "/v1/{field=a/*}/b/{name=**}", ""],
+ ["get", "/v1/{field=a/*}/b/first/last", ""],
+ ],
+ None,
+ {"field": "a/parent", "foo": "bar"},
+ ["get", "/v1/a/parent/b/first/last", {}, {"foo": "bar"}],
+ ],
+ [
+ [
+ ["get", "/v1/{selector=a/*}/b/{oauth.allow_without_credential=**}", ""],
+ ["get", "/v1/{selector=a/*}/b/first/last", ""],
+ ],
+ auth_pb2.AuthenticationRule(
+ selector="a/parent",
+ allow_without_credential=True,
+ oauth=auth_pb2.OAuthRequirements(),
+ ),
+ {},
+ [
+ "get",
+ "/v1/a/parent/b/first/last",
+ None,
+ auth_pb2.AuthenticationRule(
+ allow_without_credential=True, oauth=auth_pb2.OAuthRequirements()
+ ),
+ ],
+ ],
+ ],
+)
+def test_transcode_with_additional_bindings(
+ http_options, message, request_kwargs, expected_result
+):
+ http_options, expected_result = helper_test_transcode(http_options, expected_result)
+ result = path_template.transcode(http_options, message, **request_kwargs)
+ assert result == expected_result
+
+
+@pytest.mark.parametrize(
+ "http_options, message, request_kwargs",
+ [
+ [[["get", "/v1/{name}", ""]], None, {"foo": "bar"}],
+ [[["get", "/v1/{selector}", ""]], auth_pb2.AuthenticationRule(), {}],
+ [[["get", "/v1/{name}", ""]], auth_pb2.AuthenticationRule(), {}],
+ [[["get", "/v1/{name}", ""]], None, {"name": "first/last"}],
+ [
+ [["get", "/v1/{selector}", ""]],
+ auth_pb2.AuthenticationRule(selector="first/last"),
+ {},
+ ],
+ [[["get", "/v1/{name=mr/*/*}", ""]], None, {"name": "first/last"}],
+ [
+ [["get", "/v1/{selector=mr/*/*}", ""]],
+ auth_pb2.AuthenticationRule(selector="first/last"),
+ {},
+ ],
+ [[["post", "/v1/{name}", "data"]], None, {"name": "first/last"}],
+ [
+ [["post", "/v1/{selector}", "data"]],
+ auth_pb2.AuthenticationRule(selector="first"),
+ {},
+ ],
+ [[["post", "/v1/{first_name}", "data"]], None, {"last_name": "last"}],
+ [
+ [["post", "/v1/{first_name}", ""]],
+ auth_pb2.AuthenticationRule(selector="first"),
+ {},
+ ],
+ ],
+)
+def test_transcode_fails(http_options, message, request_kwargs):
+ http_options, _ = helper_test_transcode(http_options, range(4))
+ with pytest.raises(ValueError) as exc_info:
+ path_template.transcode(http_options, message, **request_kwargs)
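+ # The error message should name every URI pattern that failed to match.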
+ assert str(exc_info.value).count("URI") == len(http_options)
+
+
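+# Expands the compact [method, uri, body] fixtures into transcode's request
+# dicts, and [method, uri, body, query_params] into the expected result.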
+def helper_test_transcode(http_options_list, expected_result_list):
+ http_options = []
+ for opt_list in http_options_list:
+ http_option = {"method": opt_list[0], "uri": opt_list[1]}
+ if opt_list[2]:
+ http_option["body"] = opt_list[2]
+ http_options.append(http_option)
+
+ expected_result = {
+ "method": expected_result_list[0],
+ "uri": expected_result_list[1],
+ "query_params": expected_result_list[3],
+ }
+ if expected_result_list[2]:
+ expected_result["body"] = expected_result_list[2]
+ return (http_options, expected_result)
diff --git a/tests/unit/test_protobuf_helpers.py b/tests/unit/test_protobuf_helpers.py
index db972383..5678d3bc 100644
--- a/tests/unit/test_protobuf_helpers.py
+++ b/tests/unit/test_protobuf_helpers.py
@@ -13,6 +13,7 @@
# limitations under the License.
import pytest
+import re
from google.api import http_pb2
from google.api_core import protobuf_helpers
@@ -65,7 +66,12 @@ def test_from_any_pb_failure():
in_message = any_pb2.Any()
in_message.Pack(date_pb2.Date(year=1990))
- with pytest.raises(TypeError):
+ with pytest.raises(
+ TypeError,
+ match=re.escape(
+ "Could not convert `google.type.Date` with underlying type `google.protobuf.any_pb2.Any` to `google.type.TimeOfDay`"
+ ),
+ ):
protobuf_helpers.from_any_pb(timeofday_pb2.TimeOfDay, in_message)
@@ -472,3 +478,35 @@ def test_field_mask_different_level_diffs():
"alpha",
"red",
]
+
+
+def test_field_mask_ignore_trailing_underscore():
+ import proto
+
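+ # proto-plus appends an underscore to field names that shadow Python
+ # keywords or builtins; the field mask must use the wire name "type".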
+ class Foo(proto.Message):
+ type_ = proto.Field(proto.STRING, number=1)
+ input_config = proto.Field(proto.STRING, number=2)
+
+ modified = Foo(type_="bar", input_config="baz")
+
+ assert sorted(protobuf_helpers.field_mask(None, Foo.pb(modified)).paths) == [
+ "input_config",
+ "type",
+ ]
+
+
+def test_field_mask_ignore_trailing_underscore_with_nesting():
+ import proto
+
+ class Bar(proto.Message):
+ class Baz(proto.Message):
+ input_config = proto.Field(proto.STRING, number=1)
+
+ type_ = proto.Field(Baz, number=1)
+
+ modified = Bar()
+ modified.type_.input_config = "foo"
+
+ assert sorted(protobuf_helpers.field_mask(None, Bar.pb(modified)).paths) == [
+ "type.input_config",
+ ]
diff --git a/tests/unit/test_rest_helpers.py b/tests/unit/test_rest_helpers.py
new file mode 100644
index 00000000..ff1a43f0
--- /dev/null
+++ b/tests/unit/test_rest_helpers.py
@@ -0,0 +1,94 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pytest
+
+from google.api_core import rest_helpers
+
+
+def test_flatten_simple_value():
+ with pytest.raises(TypeError):
+ rest_helpers.flatten_query_params("abc")
+
+
+def test_flatten_list():
+ with pytest.raises(TypeError):
+ rest_helpers.flatten_query_params(["abc", "def"])
+
+
+def test_flatten_none():
+ assert rest_helpers.flatten_query_params(None) == []
+
+
+def test_flatten_empty_dict():
+ assert rest_helpers.flatten_query_params({}) == []
+
+
+def test_flatten_simple_dict():
+ obj = {"a": "abc", "b": "def", "c": True, "d": False, "e": 10, "f": -3.76}
+ assert rest_helpers.flatten_query_params(obj) == [
+ ("a", "abc"),
+ ("b", "def"),
+ ("c", True),
+ ("d", False),
+ ("e", 10),
+ ("f", -3.76),
+ ]
+
+
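+# With strict=True, primitive values are rendered as query-string text
+# (booleans lowercased, numbers stringified).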
+def test_flatten_simple_dict_strict():
+ obj = {"a": "abc", "b": "def", "c": True, "d": False, "e": 10, "f": -3.76}
+ assert rest_helpers.flatten_query_params(obj, strict=True) == [
+ ("a", "abc"),
+ ("b", "def"),
+ ("c", "true"),
+ ("d", "false"),
+ ("e", "10"),
+ ("f", "-3.76"),
+ ]
+
+
+def test_flatten_repeated_field():
+ assert rest_helpers.flatten_query_params({"a": ["x", "y", "z", None]}) == [
+ ("a", "x"),
+ ("a", "y"),
+ ("a", "z"),
+ ]
+
+
+def test_flatten_nested_dict():
+ obj = {"a": {"b": {"c": ["x", "y", "z"]}}, "d": {"e": "uvw"}}
+ expected_result = [("a.b.c", "x"), ("a.b.c", "y"), ("a.b.c", "z"), ("d.e", "uvw")]
+
+ assert rest_helpers.flatten_query_params(obj) == expected_result
+
+
+def test_flatten_repeated_dict():
+ obj = {
+ "a": {"b": {"c": [{"v": 1}, {"v": 2}]}},
+ "d": "uvw",
+ }
+
+ with pytest.raises(ValueError):
+ rest_helpers.flatten_query_params(obj)
+
+
+def test_flatten_repeated_list():
+ obj = {
+ "a": {"b": {"c": [["e", "f"], ["g", "h"]]}},
+ "d": "uvw",
+ }
+
+ with pytest.raises(ValueError):
+ rest_helpers.flatten_query_params(obj)
diff --git a/tests/unit/test_rest_streaming.py b/tests/unit/test_rest_streaming.py
new file mode 100644
index 00000000..0f998dfe
--- /dev/null
+++ b/tests/unit/test_rest_streaming.py
@@ -0,0 +1,296 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import datetime
+import logging
+import random
+import time
+from typing import List
+from unittest.mock import patch
+
+import proto
+import pytest
+import requests
+
+from google.api_core import rest_streaming
+from google.api import http_pb2
+from google.api import httpbody_pb2
+
+from ..helpers import Composer, Song, EchoResponse, parse_responses
+
+
+__protobuf__ = proto.module(package=__name__)
+SEED = int(time.time())
+logging.info(f"Starting sync rest streaming tests with random seed: {SEED}")
+random.seed(SEED)
+
+
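+# Stand-in for requests.Response: replays the serialized responses through
+# iter_content, optionally splitting them at random byte boundaries.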
+class ResponseMock(requests.Response):
+ class _ResponseItr:
+ def __init__(self, _response_bytes: bytes, random_split=False):
+ self._responses_bytes = _response_bytes
+ self._i = 0
+ self._random_split = random_split
+
+ def __next__(self):
+ if self._i == len(self._responses_bytes):
+ raise StopIteration
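+ # Optionally emit a chunk of random length so the iterator under test
+ # must reassemble messages split at arbitrary positions.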
+ if self._random_split:
+ n = random.randint(1, len(self._responses_bytes[self._i :]))
+ else:
+ n = 1
+ x = self._responses_bytes[self._i : self._i + n]
+ self._i += n
+ return x.decode("utf-8")
+
+ def __init__(
+ self,
+ responses: List[proto.Message],
+ response_cls,
+ random_split=False,
+ ):
+ super().__init__()
+ self._responses = responses
+ self._random_split = random_split
+ self._response_message_cls = response_cls
+
+ def _parse_responses(self):
+ return parse_responses(self._response_message_cls, self._responses)
+
+ def close(self):
+ raise NotImplementedError()
+
+ def iter_content(self, *args, **kwargs):
+ return self._ResponseItr(
+ self._parse_responses(),
+ random_split=self._random_split,
+ )
+
+
+@pytest.mark.parametrize(
+ "random_split,resp_message_is_proto_plus",
+ [(False, True), (False, False)],
+)
+def test_next_simple(random_split, resp_message_is_proto_plus):
+ if resp_message_is_proto_plus:
+ response_type = EchoResponse
+ responses = [EchoResponse(content="hello world"), EchoResponse(content="yes")]
+ else:
+ response_type = httpbody_pb2.HttpBody
+ responses = [
+ httpbody_pb2.HttpBody(content_type="hello world"),
+ httpbody_pb2.HttpBody(content_type="yes"),
+ ]
+
+ resp = ResponseMock(
+ responses=responses, random_split=random_split, response_cls=response_type
+ )
+ itr = rest_streaming.ResponseIterator(resp, response_type)
+ assert list(itr) == responses
+
+
+@pytest.mark.parametrize(
+ "random_split,resp_message_is_proto_plus",
+ [
+ (True, True),
+ (False, True),
+ (True, False),
+ (False, False),
+ ],
+)
+def test_next_nested(random_split, resp_message_is_proto_plus):
+ if resp_message_is_proto_plus:
+ response_type = Song
+ responses = [
+ Song(title="some song", composer=Composer(given_name="some name")),
+ Song(title="another song", date_added=datetime.datetime(2021, 12, 17)),
+ ]
+ else:
+ # Although `http_pb2.HttpRule` is used in the response, any response
+ # message that has a nested field would satisfy this test.
+ response_type = http_pb2.HttpRule
+ responses = [
+ http_pb2.HttpRule(
+ selector="some selector",
+ custom=http_pb2.CustomHttpPattern(kind="some kind"),
+ ),
+ http_pb2.HttpRule(
+ selector="another selector",
+ custom=http_pb2.CustomHttpPattern(path="some path"),
+ ),
+ ]
+ resp = ResponseMock(
+ responses=responses, random_split=random_split, response_cls=response_type
+ )
+ itr = rest_streaming.ResponseIterator(resp, response_type)
+ assert list(itr) == responses
+
+
+@pytest.mark.parametrize(
+ "random_split,resp_message_is_proto_plus",
+ [
+ (True, True),
+ (False, True),
+ (True, False),
+ (False, False),
+ ],
+)
+def test_next_stress(random_split, resp_message_is_proto_plus):
+ n = 50
+ if resp_message_is_proto_plus:
+ response_type = Song
+ responses = [
+ Song(title="title_%d" % i, composer=Composer(given_name="name_%d" % i))
+ for i in range(n)
+ ]
+ else:
+ response_type = http_pb2.HttpRule
+ responses = [
+ http_pb2.HttpRule(
+ selector="selector_%d" % i,
+ custom=http_pb2.CustomHttpPattern(path="path_%d" % i),
+ )
+ for i in range(n)
+ ]
+ resp = ResponseMock(
+ responses=responses, random_split=random_split, response_cls=response_type
+ )
+ itr = rest_streaming.ResponseIterator(resp, response_type)
+ assert list(itr) == responses
+
+
+@pytest.mark.parametrize(
+ "random_split,resp_message_is_proto_plus",
+ [
+ (True, True),
+ (False, True),
+ (True, False),
+ (False, False),
+ ],
+)
+def test_next_escaped_characters_in_string(random_split, resp_message_is_proto_plus):
+ if resp_message_is_proto_plus:
+ response_type = Song
+ composer_with_relateds = Composer()
+ relateds = ["Artist A", "Artist B"]
+ composer_with_relateds.relateds = relateds
+
+ responses = [
+ Song(
+ title='ti"tle\nfoo\tbar{}', composer=Composer(given_name="name\n\n\n")
+ ),
+ Song(
+ title='{"this is weird": "totally"}',
+ composer=Composer(given_name="\\{}\\"),
+ ),
+ Song(title='\\{"key": ["value",]}\\', composer=composer_with_relateds),
+ ]
+ else:
+ response_type = http_pb2.Http
+ responses = [
+ http_pb2.Http(
+ rules=[
+ http_pb2.HttpRule(
+ selector='ti"tle\nfoo\tbar{}',
+ custom=http_pb2.CustomHttpPattern(kind="name\n\n\n"),
+ )
+ ]
+ ),
+ http_pb2.Http(
+ rules=[
+ http_pb2.HttpRule(
+ selector='{"this is weird": "totally"}',
+ custom=http_pb2.CustomHttpPattern(kind="\\{}\\"),
+ )
+ ]
+ ),
+ http_pb2.Http(
+ rules=[
+ http_pb2.HttpRule(
+ selector='\\{"key": ["value",]}\\',
+ custom=http_pb2.CustomHttpPattern(kind="\\{}\\"),
+ )
+ ]
+ ),
+ ]
+ resp = ResponseMock(
+ responses=responses, random_split=random_split, response_cls=response_type
+ )
+ itr = rest_streaming.ResponseIterator(resp, response_type)
+ assert list(itr) == responses
+
+
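+# A bare JSON object (not an array of messages) is an invalid stream payload.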
+@pytest.mark.parametrize("response_type", [EchoResponse, httpbody_pb2.HttpBody])
+def test_next_not_array(response_type):
+ with patch.object(
+ ResponseMock, "iter_content", return_value=iter('{"hello": 0}')
+ ) as mock_method:
+ resp = ResponseMock(responses=[], response_cls=response_type)
+ itr = rest_streaming.ResponseIterator(resp, response_type)
+ with pytest.raises(ValueError):
+ next(itr)
+ mock_method.assert_called_once()
+
+
+@pytest.mark.parametrize("response_type", [EchoResponse, httpbody_pb2.HttpBody])
+def test_cancel(response_type):
+ with patch.object(ResponseMock, "close", return_value=None) as mock_method:
+ resp = ResponseMock(responses=[], response_cls=response_type)
+ itr = rest_streaming.ResponseIterator(resp, response_type)
+ itr.cancel()
+ mock_method.assert_called_once()
+
+
+@pytest.mark.parametrize(
+ "response_type,return_value",
+ [
+ (EchoResponse, bytes('[{"content": "hello"}, {', "utf-8")),
+ (httpbody_pb2.HttpBody, bytes('[{"content_type": "hello"}, {', "utf-8")),
+ ],
+)
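+# The buffered payload ends mid-object: the iterator must raise by the time
+# it reaches the truncated second element.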
+def test_check_buffer(response_type, return_value):
+ with patch.object(
+ ResponseMock,
+ "_parse_responses",
+ return_value=return_value,
+ ):
+ resp = ResponseMock(responses=[], response_cls=response_type)
+ itr = rest_streaming.ResponseIterator(resp, response_type)
+ with pytest.raises(ValueError):
+ next(itr)
+ next(itr)
+
+
+@pytest.mark.parametrize("response_type", [EchoResponse, httpbody_pb2.HttpBody])
+def test_next_html(response_type):
+ with patch.object(
+ ResponseMock, "iter_content", return_value=iter("")
+ ) as mock_method:
+ resp = ResponseMock(responses=[], response_cls=response_type)
+ itr = rest_streaming.ResponseIterator(resp, response_type)
+ with pytest.raises(ValueError):
+ next(itr)
+ mock_method.assert_called_once()
+
+
+def test_invalid_response_class():
+ class SomeClass:
+ pass
+
+ resp = ResponseMock(responses=[], response_cls=SomeClass)
+ with pytest.raises(
+ ValueError,
+ match="Response message class must be a subclass of proto.Message or google.protobuf.message.Message",
+ ):
+ rest_streaming.ResponseIterator(resp, SomeClass)
diff --git a/tests/unit/test_timeout.py b/tests/unit/test_timeout.py
index 30d624e2..2c20202b 100644
--- a/tests/unit/test_timeout.py
+++ b/tests/unit/test_timeout.py
@@ -14,14 +14,13 @@
import datetime
import itertools
+from unittest import mock
-import mock
-
-from google.api_core import timeout
+from google.api_core import timeout as timeouts
def test__exponential_timeout_generator_base_2():
- gen = timeout._exponential_timeout_generator(1.0, 60.0, 2.0, deadline=None)
+ gen = timeouts._exponential_timeout_generator(1.0, 60.0, 2.0, deadline=None)
result = list(itertools.islice(gen, 8))
assert result == [1, 2, 4, 8, 16, 32, 60, 60]
@@ -34,7 +33,7 @@ def test__exponential_timeout_generator_base_deadline(utcnow):
datetime.datetime.min + datetime.timedelta(seconds=n) for n in range(15)
]
- gen = timeout._exponential_timeout_generator(1.0, 60.0, 2.0, deadline=30.0)
+ gen = timeouts._exponential_timeout_generator(1.0, 60.0, 2.0, deadline=30.0)
result = list(itertools.islice(gen, 14))
# Should grow until the cumulative time is > 30s, then start decreasing as
@@ -42,22 +41,105 @@ def test__exponential_timeout_generator_base_deadline(utcnow):
assert result == [1, 2, 4, 8, 16, 24, 23, 22, 21, 20, 19, 18, 17, 16]
+class TestTimeToDeadlineTimeout(object):
+ def test_constructor(self):
+ timeout_ = timeouts.TimeToDeadlineTimeout()
+ assert timeout_._timeout is None
+
+ def test_constructor_args(self):
+ timeout_ = timeouts.TimeToDeadlineTimeout(42.0)
+ assert timeout_._timeout == 42.0
+
+ def test___str__(self):
+ timeout_ = timeouts.TimeToDeadlineTimeout(1)
+ assert str(timeout_) == ""
+
+ def test_apply(self):
+ target = mock.Mock(spec=["__call__", "__name__"], __name__="target")
+
+ now = datetime.datetime.now(tz=datetime.timezone.utc)
+
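+ # Six fake timestamps: one is consumed when the decorator is applied,
+ # then one per call. The wrapper forwards the time remaining until the
+ # 42s deadline; once the deadline has passed it falls back to the full
+ # timeout, as the last two assertions show.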
+ times = [
+ now,
+ now + datetime.timedelta(seconds=0.0009),
+ now + datetime.timedelta(seconds=1),
+ now + datetime.timedelta(seconds=39),
+ now + datetime.timedelta(seconds=42),
+ now + datetime.timedelta(seconds=43),
+ ]
+
+ def _clock():
+ return times.pop(0)
+
+ timeout_ = timeouts.TimeToDeadlineTimeout(42.0, _clock)
+ wrapped = timeout_(target)
+
+ wrapped()
+ target.assert_called_with(timeout=42.0)
+ wrapped()
+ target.assert_called_with(timeout=41.0)
+ wrapped()
+ target.assert_called_with(timeout=3.0)
+ wrapped()
+ target.assert_called_with(timeout=42.0)
+ wrapped()
+ target.assert_called_with(timeout=42.0)
+
+ def test_apply_no_timeout(self):
+ target = mock.Mock(spec=["__call__", "__name__"], __name__="target")
+
+ now = datetime.datetime.now(tz=datetime.timezone.utc)
+
+ times = [
+ now,
+ now + datetime.timedelta(seconds=0.0009),
+ now + datetime.timedelta(seconds=1),
+ now + datetime.timedelta(seconds=2),
+ ]
+
+ def _clock():
+ return times.pop(0)
+
+ timeout_ = timeouts.TimeToDeadlineTimeout(clock=_clock)
+ wrapped = timeout_(target)
+
+ wrapped()
+ target.assert_called_with()
+ wrapped()
+ target.assert_called_with()
+
+ def test_apply_passthrough(self):
+ target = mock.Mock(spec=["__call__", "__name__"], __name__="target")
+ timeout_ = timeouts.TimeToDeadlineTimeout(42.0)
+ wrapped = timeout_(target)
+
+ wrapped(1, 2, meep="moop")
+
+ target.assert_called_once_with(1, 2, meep="moop", timeout=42.0)
+
+
class TestConstantTimeout(object):
def test_constructor(self):
- timeout_ = timeout.ConstantTimeout()
+ timeout_ = timeouts.ConstantTimeout()
assert timeout_._timeout is None
def test_constructor_args(self):
- timeout_ = timeout.ConstantTimeout(42.0)
+ timeout_ = timeouts.ConstantTimeout(42.0)
assert timeout_._timeout == 42.0
def test___str__(self):
- timeout_ = timeout.ConstantTimeout(1)
+ timeout_ = timeouts.ConstantTimeout(1)
assert str(timeout_) == ""
def test_apply(self):
target = mock.Mock(spec=["__call__", "__name__"], __name__="target")
- timeout_ = timeout.ConstantTimeout(42.0)
+ timeout_ = timeouts.ConstantTimeout(42.0)
wrapped = timeout_(target)
wrapped()
@@ -66,7 +148,7 @@ def test_apply(self):
def test_apply_passthrough(self):
target = mock.Mock(spec=["__call__", "__name__"], __name__="target")
- timeout_ = timeout.ConstantTimeout(42.0)
+ timeout_ = timeouts.ConstantTimeout(42.0)
wrapped = timeout_(target)
wrapped(1, 2, meep="moop")
@@ -76,30 +158,30 @@ def test_apply_passthrough(self):
class TestExponentialTimeout(object):
def test_constructor(self):
- timeout_ = timeout.ExponentialTimeout()
- assert timeout_._initial == timeout._DEFAULT_INITIAL_TIMEOUT
- assert timeout_._maximum == timeout._DEFAULT_MAXIMUM_TIMEOUT
- assert timeout_._multiplier == timeout._DEFAULT_TIMEOUT_MULTIPLIER
- assert timeout_._deadline == timeout._DEFAULT_DEADLINE
+ timeout_ = timeouts.ExponentialTimeout()
+ assert timeout_._initial == timeouts._DEFAULT_INITIAL_TIMEOUT
+ assert timeout_._maximum == timeouts._DEFAULT_MAXIMUM_TIMEOUT
+ assert timeout_._multiplier == timeouts._DEFAULT_TIMEOUT_MULTIPLIER
+ assert timeout_._deadline == timeouts._DEFAULT_DEADLINE
def test_constructor_args(self):
- timeout_ = timeout.ExponentialTimeout(1, 2, 3, 4)
+ timeout_ = timeouts.ExponentialTimeout(1, 2, 3, 4)
assert timeout_._initial == 1
assert timeout_._maximum == 2
assert timeout_._multiplier == 3
assert timeout_._deadline == 4
def test_with_timeout(self):
- original_timeout = timeout.ExponentialTimeout()
+ original_timeout = timeouts.ExponentialTimeout()
timeout_ = original_timeout.with_deadline(42)
assert original_timeout is not timeout_
- assert timeout_._initial == timeout._DEFAULT_INITIAL_TIMEOUT
- assert timeout_._maximum == timeout._DEFAULT_MAXIMUM_TIMEOUT
- assert timeout_._multiplier == timeout._DEFAULT_TIMEOUT_MULTIPLIER
+ assert timeout_._initial == timeouts._DEFAULT_INITIAL_TIMEOUT
+ assert timeout_._maximum == timeouts._DEFAULT_MAXIMUM_TIMEOUT
+ assert timeout_._multiplier == timeouts._DEFAULT_TIMEOUT_MULTIPLIER
assert timeout_._deadline == 42
def test___str__(self):
- timeout_ = timeout.ExponentialTimeout(1, 2, 3, 4)
+ timeout_ = timeouts.ExponentialTimeout(1, 2, 3, 4)
assert str(timeout_) == (
""
@@ -107,7 +189,7 @@ def test___str__(self):
def test_apply(self):
target = mock.Mock(spec=["__call__", "__name__"], __name__="target")
- timeout_ = timeout.ExponentialTimeout(1, 10, 2)
+ timeout_ = timeouts.ExponentialTimeout(1, 10, 2)
wrapped = timeout_(target)
wrapped()
@@ -121,7 +203,7 @@ def test_apply(self):
def test_apply_passthrough(self):
target = mock.Mock(spec=["__call__", "__name__"], __name__="target")
- timeout_ = timeout.ExponentialTimeout(42.0, 100, 2)
+ timeout_ = timeouts.ExponentialTimeout(42.0, 100, 2)
wrapped = timeout_(target)
wrapped(1, 2, meep="moop")
diff --git a/tests/unit/test_universe.py b/tests/unit/test_universe.py
new file mode 100644
index 00000000..214e00ac
--- /dev/null
+++ b/tests/unit/test_universe.py
@@ -0,0 +1,63 @@
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pytest
+from google.api_core import universe
+
+
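+# Minimal credentials stand-in; omitting universe_domain entirely models
+# credentials objects that predate the attribute.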
+class _Fake_Credentials:
+ def __init__(self, universe_domain=None):
+ if universe_domain:
+ self.universe_domain = universe_domain
+
+
+def test_determine_domain():
+ domain_client = "foo.com"
+ domain_env = "bar.com"
+
+ assert universe.determine_domain(domain_client, domain_env) == domain_client
+ assert universe.determine_domain(None, domain_env) == domain_env
+ assert universe.determine_domain(domain_client, None) == domain_client
+ assert universe.determine_domain(None, None) == universe.DEFAULT_UNIVERSE
+
+ with pytest.raises(universe.EmptyUniverseError):
+ universe.determine_domain("", None)
+
+ with pytest.raises(universe.EmptyUniverseError):
+ universe.determine_domain(None, "")
+
+
+def test_compare_domains():
+ fake_domain = "foo.com"
+ another_fake_domain = "bar.com"
+
+ assert universe.compare_domains(universe.DEFAULT_UNIVERSE, _Fake_Credentials())
+ assert universe.compare_domains(fake_domain, _Fake_Credentials(fake_domain))
+
+ with pytest.raises(universe.UniverseMismatchError) as excinfo:
+ universe.compare_domains(
+ universe.DEFAULT_UNIVERSE, _Fake_Credentials(fake_domain)
+ )
+ assert str(excinfo.value).find(universe.DEFAULT_UNIVERSE) >= 0
+ assert str(excinfo.value).find(fake_domain) >= 0
+
+ with pytest.raises(universe.UniverseMismatchError) as excinfo:
+ universe.compare_domains(fake_domain, _Fake_Credentials())
+ assert str(excinfo.value).find(fake_domain) >= 0
+ assert str(excinfo.value).find(universe.DEFAULT_UNIVERSE) >= 0
+
+ with pytest.raises(universe.UniverseMismatchError) as excinfo:
+ universe.compare_domains(fake_domain, _Fake_Credentials(another_fake_domain))
+ assert str(excinfo.value).find(fake_domain) >= 0
+ assert str(excinfo.value).find(another_fake_domain) >= 0
diff --git a/tests/unit/test_version_header.py b/tests/unit/test_version_header.py
new file mode 100644
index 00000000..ea7028e2
--- /dev/null
+++ b/tests/unit/test_version_header.py
@@ -0,0 +1,23 @@
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pytest
+
+from google.api_core import version_header
+
+
+@pytest.mark.parametrize("version_identifier", ["some_value", ""])
+def test_to_api_version_header(version_identifier):
+ value = version_header.to_api_version_header(version_identifier)
+ assert value == (version_header.API_VERSION_METADATA_KEY, version_identifier)