diff --git a/.coveragerc b/.coveragerc
index d4489ba..5cade22 100644
--- a/.coveragerc
+++ b/.coveragerc
@@ -2,10 +2,9 @@
branch = True
[report]
-fail_under = 100
show_missing = True
omit =
- google/cloud/bigquery/connection/__init__.py
+ google/cloud/bigquery_connection/__init__.py
exclude_lines =
# Re-enable the standard pragma
pragma: NO COVER
diff --git a/.flake8 b/.flake8
index ed93163..29227d4 100644
--- a/.flake8
+++ b/.flake8
@@ -26,6 +26,7 @@ exclude =
*_pb2.py
# Standard linting exemptions.
+ **/.nox/**
__pycache__,
.git,
*.pyc,
diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml
new file mode 100644
index 0000000..0954585
--- /dev/null
+++ b/.github/.OwlBot.lock.yaml
@@ -0,0 +1,3 @@
+docker:
+ image: gcr.io/repo-automation-bots/owlbot-python:latest
+ digest: sha256:df50e8d462f86d6bcb42f27ecad55bb12c404f1c65de9c6fe4c4d25120080bd6
diff --git a/.github/.OwlBot.yaml b/.github/.OwlBot.yaml
new file mode 100644
index 0000000..fe092d4
--- /dev/null
+++ b/.github/.OwlBot.yaml
@@ -0,0 +1,29 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+docker:
+ image: gcr.io/repo-automation-bots/owlbot-python:latest
+
+deep-remove-regex:
+ - /owl-bot-staging
+
+deep-preserve-regex:
+ - /owl-bot-staging/v1beta1
+
+deep-copy-regex:
+ - source: /google/cloud/bigquery/connection/(v.*)/.*-py/(.*)
+ dest: /owl-bot-staging/$1/$2
+
+begin-after-commit-hash: 130ce904e5d546c312943d10f48799590f9c0f66
+
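The deep-copy-regex entry above copies generated sources into the owl-bot staging area, with $1 capturing the API version and $2 the path inside the generated package. A minimal sketch of that rewrite, assuming a hypothetical generated-file path (real source paths come from the googleapis-gen repository):

    import re

    # Hypothetical generated-file path, for illustration only.
    src = (
        "/google/cloud/bigquery/connection/v1/"
        "bigquery-connection-v1-py/google/cloud/bigquery_connection_v1/types/connection.py"
    )

    # Same pattern as deep-copy-regex above; $1/$2 become \1/\2 in Python syntax.
    dest = re.sub(
        r"/google/cloud/bigquery/connection/(v.*)/.*-py/(.*)",
        r"/owl-bot-staging/\1/\2",
        src,
    )
    print(dest)  # /owl-bot-staging/v1/google/cloud/bigquery_connection_v1/types/connection.py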
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index 4558c4c..ae570eb 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -5,7 +5,7 @@
# https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax
# The @googleapis/api-bigquery is the default owner for changes in this repo
-* @googleapis/api-bigquery
+* @googleapis/api-bigquery @googleapis/yoshi-python
# The python-samples-reviewers team is the default owner for samples changes
/samples/ @googleapis/python-samples-owners
diff --git a/.github/header-checker-lint.yml b/.github/header-checker-lint.yml
new file mode 100644
index 0000000..6fe78aa
--- /dev/null
+++ b/.github/header-checker-lint.yml
@@ -0,0 +1,15 @@
+{"allowedCopyrightHolders": ["Google LLC"],
+ "allowedLicenses": ["Apache-2.0", "MIT", "BSD-3"],
+ "ignoreFiles": ["**/requirements.txt", "**/requirements-test.txt", "**/__init__.py", "samples/**/constraints.txt", "samples/**/constraints-test.txt"],
+ "sourceFileExtensions": [
+ "ts",
+ "js",
+ "java",
+ "sh",
+ "Dockerfile",
+ "yaml",
+ "py",
+ "html",
+ "txt"
+ ]
+}
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
index b9daa52..b4243ce 100644
--- a/.gitignore
+++ b/.gitignore
@@ -50,8 +50,10 @@ docs.metadata
# Virtual environment
env/
+
+# Test logs
coverage.xml
-sponge_log.xml
+*sponge_log.xml
# System test environment variables.
system_tests/local_test_setup
diff --git a/.kokoro/build.sh b/.kokoro/build.sh
index de1d6a1..c9f2ecc 100755
--- a/.kokoro/build.sh
+++ b/.kokoro/build.sh
@@ -15,7 +15,11 @@
set -eo pipefail
-cd github/python-bigquery-connection
+if [[ -z "${PROJECT_ROOT:-}" ]]; then
+ PROJECT_ROOT="github/python-bigquery-connection"
+fi
+
+cd "${PROJECT_ROOT}"
# Disable buffering, so that the logs stream through.
export PYTHONUNBUFFERED=1
@@ -30,16 +34,26 @@ export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json
export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json")
# Remove old nox
-python3.6 -m pip uninstall --yes --quiet nox-automation
+python3 -m pip uninstall --yes --quiet nox-automation
# Install nox
-python3.6 -m pip install --upgrade --quiet nox
-python3.6 -m nox --version
+python3 -m pip install --upgrade --quiet nox
+python3 -m nox --version
+
+# If this is a continuous build, send the test log to the FlakyBot.
+# See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot.
+if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]]; then
+ cleanup() {
+ chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot
+ $KOKORO_GFILE_DIR/linux_amd64/flakybot
+ }
+ trap cleanup EXIT HUP
+fi
# If NOX_SESSION is set, it only runs the specified session,
# otherwise run all the sessions.
if [[ -n "${NOX_SESSION:-}" ]]; then
- python3.6 -m nox -s "${NOX_SESSION:-}"
+ python3 -m nox -s ${NOX_SESSION:-}
else
- python3.6 -m nox
+ python3 -m nox
fi
diff --git a/.kokoro/docs/docs-presubmit.cfg b/.kokoro/docs/docs-presubmit.cfg
index 1118107..4a00c93 100644
--- a/.kokoro/docs/docs-presubmit.cfg
+++ b/.kokoro/docs/docs-presubmit.cfg
@@ -15,3 +15,14 @@ env_vars: {
key: "TRAMPOLINE_IMAGE_UPLOAD"
value: "false"
}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-bigquery-connection/.kokoro/build.sh"
+}
+
+# Only run this nox session.
+env_vars: {
+ key: "NOX_SESSION"
+ value: "docs docfx"
+}
diff --git a/.kokoro/release.sh b/.kokoro/release.sh
index 9807e2a..0f64c6b 100755
--- a/.kokoro/release.sh
+++ b/.kokoro/release.sh
@@ -26,7 +26,7 @@ python3 -m pip install --upgrade twine wheel setuptools
export PYTHONUNBUFFERED=1
# Move into the package, build the distribution and upload.
-TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google_cloud_pypi_password")
+TWINE_PASSWORD=$(cat "${KOKORO_GFILE_DIR}/secret_manager/google-cloud-pypi-token")
cd github/python-bigquery-connection
python3 setup.py sdist bdist_wheel
-twine upload --username gcloudpypi --password "${TWINE_PASSWORD}" dist/*
+twine upload --username __token__ --password "${TWINE_PASSWORD}" dist/*
diff --git a/.kokoro/release/common.cfg b/.kokoro/release/common.cfg
index 02eb4a6..7acf3d5 100644
--- a/.kokoro/release/common.cfg
+++ b/.kokoro/release/common.cfg
@@ -23,18 +23,8 @@ env_vars: {
value: "github/python-bigquery-connection/.kokoro/release.sh"
}
-# Fetch PyPI password
-before_action {
- fetch_keystore {
- keystore_resource {
- keystore_config_id: 73713
- keyname: "google_cloud_pypi_password"
- }
- }
-}
-
# Tokens needed to report release status back to GitHub
env_vars: {
key: "SECRET_MANAGER_KEYS"
- value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem"
-}
\ No newline at end of file
+ value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem,google-cloud-pypi-token"
+}
diff --git a/.kokoro/samples/python3.6/periodic-head.cfg b/.kokoro/samples/python3.6/periodic-head.cfg
new file mode 100644
index 0000000..f9cfcd3
--- /dev/null
+++ b/.kokoro/samples/python3.6/periodic-head.cfg
@@ -0,0 +1,11 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-bigquery-connection/.kokoro/test-samples-against-head.sh"
+}
diff --git a/.kokoro/samples/python3.7/periodic-head.cfg b/.kokoro/samples/python3.7/periodic-head.cfg
new file mode 100644
index 0000000..f9cfcd3
--- /dev/null
+++ b/.kokoro/samples/python3.7/periodic-head.cfg
@@ -0,0 +1,11 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-bigquery-connection/.kokoro/test-samples-against-head.sh"
+}
diff --git a/.kokoro/samples/python3.8/periodic-head.cfg b/.kokoro/samples/python3.8/periodic-head.cfg
new file mode 100644
index 0000000..f9cfcd3
--- /dev/null
+++ b/.kokoro/samples/python3.8/periodic-head.cfg
@@ -0,0 +1,11 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-bigquery-connection/.kokoro/test-samples-against-head.sh"
+}
diff --git a/.kokoro/samples/python3.9/common.cfg b/.kokoro/samples/python3.9/common.cfg
new file mode 100644
index 0000000..e8dc291
--- /dev/null
+++ b/.kokoro/samples/python3.9/common.cfg
@@ -0,0 +1,40 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Specify which tests to run
+env_vars: {
+ key: "RUN_TESTS_SESSION"
+ value: "py-3.9"
+}
+
+# Declare build specific Cloud project.
+env_vars: {
+ key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+ value: "python-docs-samples-tests-py39"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-bigquery-connection/.kokoro/test-samples.sh"
+}
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
+}
+
+# Download secrets for samples
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-bigquery-connection/.kokoro/trampoline.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.9/continuous.cfg b/.kokoro/samples/python3.9/continuous.cfg
new file mode 100644
index 0000000..a1c8d97
--- /dev/null
+++ b/.kokoro/samples/python3.9/continuous.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.9/periodic-head.cfg b/.kokoro/samples/python3.9/periodic-head.cfg
new file mode 100644
index 0000000..f9cfcd3
--- /dev/null
+++ b/.kokoro/samples/python3.9/periodic-head.cfg
@@ -0,0 +1,11 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-bigquery-connection/.kokoro/test-samples-against-head.sh"
+}
diff --git a/.kokoro/samples/python3.9/periodic.cfg b/.kokoro/samples/python3.9/periodic.cfg
new file mode 100644
index 0000000..50fec96
--- /dev/null
+++ b/.kokoro/samples/python3.9/periodic.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "False"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.9/presubmit.cfg b/.kokoro/samples/python3.9/presubmit.cfg
new file mode 100644
index 0000000..a1c8d97
--- /dev/null
+++ b/.kokoro/samples/python3.9/presubmit.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/test-samples-against-head.sh b/.kokoro/test-samples-against-head.sh
new file mode 100755
index 0000000..3acd37f
--- /dev/null
+++ b/.kokoro/test-samples-against-head.sh
@@ -0,0 +1,28 @@
+#!/bin/bash
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# A customized test runner for samples.
+#
+# For periodic builds, you can specify this file for testing against head.
+
+# `-e` enables the script to automatically fail when a command fails
+# `-o pipefail` sets the exit code to that of the rightmost command to exit with a non-zero status
+set -eo pipefail
+# Enables `**` to include files nested inside sub-folders
+shopt -s globstar
+
+cd github/python-bigquery-connection
+
+exec .kokoro/test-samples-impl.sh
diff --git a/.kokoro/test-samples-impl.sh b/.kokoro/test-samples-impl.sh
new file mode 100755
index 0000000..cf5de74
--- /dev/null
+++ b/.kokoro/test-samples-impl.sh
@@ -0,0 +1,102 @@
+#!/bin/bash
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# `-e` enables the script to automatically fail when a command fails
+# `-o pipefail` sets the exit code to that of the rightmost command to exit with a non-zero status
+set -eo pipefail
+# Enables `**` to include files nested inside sub-folders
+shopt -s globstar
+
+# Exit early if samples directory doesn't exist
+if [ ! -d "./samples" ]; then
+ echo "No tests run. './samples' not found"
+ exit 0
+fi
+
+# Disable buffering, so that the logs stream through.
+export PYTHONUNBUFFERED=1
+
+# Debug: show build environment
+env | grep KOKORO
+
+# Install nox
+python3.6 -m pip install --upgrade --quiet nox
+
+# Use the secrets accessor service account to get secrets
+if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then
+ gcloud auth activate-service-account \
+ --key-file="${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" \
+ --project="cloud-devrel-kokoro-resources"
+fi
+
+# This script will create 3 files:
+# - testing/test-env.sh
+# - testing/service-account.json
+# - testing/client-secrets.json
+./scripts/decrypt-secrets.sh
+
+source ./testing/test-env.sh
+export GOOGLE_APPLICATION_CREDENTIALS=$(pwd)/testing/service-account.json
+
+# For cloud-run session, we activate the service account for gcloud sdk.
+gcloud auth activate-service-account \
+ --key-file "${GOOGLE_APPLICATION_CREDENTIALS}"
+
+export GOOGLE_CLIENT_SECRETS=$(pwd)/testing/client-secrets.json
+
+echo -e "\n******************** TESTING PROJECTS ********************"
+
+# Switch to 'fail at end' to allow all tests to complete before exiting.
+set +e
+# Use RTN to return a non-zero value if the test fails.
+RTN=0
+ROOT=$(pwd)
+# Find all requirements.txt in the samples directory (may break on whitespace).
+for file in samples/**/requirements.txt; do
+ cd "$ROOT"
+ # Navigate to the project folder.
+ file=$(dirname "$file")
+ cd "$file"
+
+ echo "------------------------------------------------------------"
+ echo "- testing $file"
+ echo "------------------------------------------------------------"
+
+ # Use nox to execute the tests for the project.
+ python3.6 -m nox -s "$RUN_TESTS_SESSION"
+ EXIT=$?
+
+ # If this is a periodic build, send the test log to the FlakyBot.
+ # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot.
+ if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
+ chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot
+ $KOKORO_GFILE_DIR/linux_amd64/flakybot
+ fi
+
+ if [[ $EXIT -ne 0 ]]; then
+ RTN=1
+ echo -e "\n Testing failed: Nox returned a non-zero exit code. \n"
+ else
+ echo -e "\n Testing completed.\n"
+ fi
+
+done
+cd "$ROOT"
+
+# Workaround for Kokoro permissions issue: delete secrets
+rm testing/{test-env.sh,client-secrets.json,service-account.json}
+
+exit "$RTN"
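The loop above discovers sample projects by globbing samples/**/requirements.txt. A minimal Python sketch of the same discovery step, shown only to illustrate the layout the script expects (pathlib sidesteps the whitespace caveat noted in the comment):

    from pathlib import Path

    # Collect every samples/**/requirements.txt and derive the project
    # folders the test runner would visit.
    project_dirs = sorted({p.parent for p in Path("samples").glob("**/requirements.txt")})
    for directory in project_dirs:
        print(directory)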
diff --git a/.kokoro/test-samples.sh b/.kokoro/test-samples.sh
index 31e13d5..1c88307 100755
--- a/.kokoro/test-samples.sh
+++ b/.kokoro/test-samples.sh
@@ -13,6 +13,10 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+# The default test runner for samples.
+#
+# For periodic builds, we rewind the repo to the latest release and
+# run test-samples-impl.sh.
# `-e` enables the script to automatically fail when a command fails
# `-o pipefail` sets the exit code to that of the rightmost command to exit with a non-zero status
@@ -24,87 +28,19 @@ cd github/python-bigquery-connection
# Run periodic samples tests at latest release
if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
+ # Preserve the test runner implementation.
+ cp .kokoro/test-samples-impl.sh "${TMPDIR}/test-samples-impl.sh"
+ echo "--- IMPORTANT IMPORTANT IMPORTANT ---"
+ echo "Now we rewind the repo back to the latest release..."
LATEST_RELEASE=$(git describe --abbrev=0 --tags)
git checkout $LATEST_RELEASE
-fi
-
-# Exit early if samples directory doesn't exist
-if [ ! -d "./samples" ]; then
- echo "No tests run. `./samples` not found"
- exit 0
-fi
-
-# Disable buffering, so that the logs stream through.
-export PYTHONUNBUFFERED=1
-
-# Debug: show build environment
-env | grep KOKORO
-
-# Install nox
-python3.6 -m pip install --upgrade --quiet nox
-
-# Use secrets acessor service account to get secrets
-if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then
- gcloud auth activate-service-account \
- --key-file="${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" \
- --project="cloud-devrel-kokoro-resources"
-fi
-
-# This script will create 3 files:
-# - testing/test-env.sh
-# - testing/service-account.json
-# - testing/client-secrets.json
-./scripts/decrypt-secrets.sh
-
-source ./testing/test-env.sh
-export GOOGLE_APPLICATION_CREDENTIALS=$(pwd)/testing/service-account.json
-
-# For cloud-run session, we activate the service account for gcloud sdk.
-gcloud auth activate-service-account \
- --key-file "${GOOGLE_APPLICATION_CREDENTIALS}"
-
-export GOOGLE_CLIENT_SECRETS=$(pwd)/testing/client-secrets.json
-
-echo -e "\n******************** TESTING PROJECTS ********************"
-
-# Switch to 'fail at end' to allow all tests to complete before exiting.
-set +e
-# Use RTN to return a non-zero value if the test fails.
-RTN=0
-ROOT=$(pwd)
-# Find all requirements.txt in the samples directory (may break on whitespace).
-for file in samples/**/requirements.txt; do
- cd "$ROOT"
- # Navigate to the project folder.
- file=$(dirname "$file")
- cd "$file"
-
- echo "------------------------------------------------------------"
- echo "- testing $file"
- echo "------------------------------------------------------------"
-
- # Use nox to execute the tests for the project.
- python3.6 -m nox -s "$RUN_TESTS_SESSION"
- EXIT=$?
-
- # If this is a periodic build, send the test log to the FlakyBot.
- # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot.
- if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
- chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot
- $KOKORO_GFILE_DIR/linux_amd64/flakybot
+ echo "The current head is: "
+ echo $(git rev-parse --verify HEAD)
+ echo "--- IMPORTANT IMPORTANT IMPORTANT ---"
+ # Restore the test runner implementation if the checked-out release does not include it.
+ if [ ! -f .kokoro/test-samples-impl.sh ]; then
+ cp "${TMPDIR}/test-samples-impl.sh" .kokoro/test-samples-impl.sh
fi
+fi
- if [[ $EXIT -ne 0 ]]; then
- RTN=1
- echo -e "\n Testing failed: Nox returned a non-zero exit code. \n"
- else
- echo -e "\n Testing completed.\n"
- fi
-
-done
-cd "$ROOT"
-
-# Workaround for Kokoro permissions issue: delete secrets
-rm testing/{test-env.sh,client-secrets.json,service-account.json}
-
-exit "$RTN"
+exec .kokoro/test-samples-impl.sh
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 6ad8334..62eb5a7 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,8 +1,22 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
# See https://pre-commit.com for more information
# See https://pre-commit.com/hooks.html for more hooks
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
- rev: v3.3.0
+ rev: v4.0.1
hooks:
- id: trailing-whitespace
- id: end-of-file-fixer
@@ -12,6 +26,6 @@ repos:
hooks:
- id: black
- repo: https://gitlab.com/pycqa/flake8
- rev: 3.8.4
+ rev: 3.9.2
hooks:
- id: flake8
diff --git a/.repo-metadata.json b/.repo-metadata.json
index 7cafc26..20a417c 100644
--- a/.repo-metadata.json
+++ b/.repo-metadata.json
@@ -6,6 +6,7 @@
"issue_tracker": "",
"release_level": "ga",
"language": "python",
+"library_type": "GAPIC_AUTO",
"repo": "googleapis/python-bigquery-connection",
"distribution_name": "google-cloud-bigquery-connection",
"api_id": "bigqueryconnection.googleapis.com",
diff --git a/.trampolinerc b/.trampolinerc
index 995ee29..383b6ec 100644
--- a/.trampolinerc
+++ b/.trampolinerc
@@ -24,6 +24,7 @@ required_envvars+=(
pass_down_envvars+=(
"STAGING_BUCKET"
"V2_STAGING_BUCKET"
+ "NOX_SESSION"
)
# Prevent unintentional override on the default image.
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 859781c..9267ece 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,26 @@
# Changelog
+## [1.1.0](https://www.github.com/googleapis/python-bigquery-connection/compare/v1.0.1...v1.1.0) (2021-06-23)
+
+
+### Features
+
+* add always_use_jwt_access ([#88](https://www.github.com/googleapis/python-bigquery-connection/issues/88)) ([821fffc](https://www.github.com/googleapis/python-bigquery-connection/commit/821fffcc3f9ecdb222e4a5a2c94ad9c5d3325681))
+* support self-signed JWT flow for service accounts ([2f1db84](https://www.github.com/googleapis/python-bigquery-connection/commit/2f1db842b16cf2c3981c61b503482fa7df85bdfe))
+
+
+### Bug Fixes
+
+* add async client to %name_%version/init.py ([2f1db84](https://www.github.com/googleapis/python-bigquery-connection/commit/2f1db842b16cf2c3981c61b503482fa7df85bdfe))
+* **deps:** add packaging requirement ([#77](https://www.github.com/googleapis/python-bigquery-connection/issues/77)) ([2ab8403](https://www.github.com/googleapis/python-bigquery-connection/commit/2ab84031d3f46b5ccd1acaefe5b744679b43e140))
+* exclude docs and tests from package ([#83](https://www.github.com/googleapis/python-bigquery-connection/issues/83)) ([3ef23e5](https://www.github.com/googleapis/python-bigquery-connection/commit/3ef23e5b9e8f4a0bcef24dbe79773ca92a336ef0))
+
+
+### Documentation
+
+* omit mention of Python 2.7 in 'CONTRIBUTING.rst' ([#1127](https://www.github.com/googleapis/python-bigquery-connection/issues/1127)) ([#85](https://www.github.com/googleapis/python-bigquery-connection/issues/85)) ([715e04b](https://www.github.com/googleapis/python-bigquery-connection/commit/715e04b77dc352b17e508288a7268c6c2ce46e10)), closes [#1126](https://www.github.com/googleapis/python-bigquery-connection/issues/1126)
+* Update the README to reflect that this library is GA ([#79](https://www.github.com/googleapis/python-bigquery-connection/issues/79)) ([f737861](https://www.github.com/googleapis/python-bigquery-connection/commit/f7378614002697ed5c7dc9217fbe8b48ba7c7410))
+
### [1.0.1](https://www.github.com/googleapis/python-bigquery-connection/compare/v1.0.0...v1.0.1) (2021-02-03)
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index 55e5868..e046030 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -21,8 +21,8 @@ In order to add a feature:
- The feature must be documented in both the API and narrative
documentation.
-- The feature must work fully on the following CPython versions: 2.7,
- 3.5, 3.6, 3.7 and 3.8 on both UNIX and Windows.
+- The feature must work fully on the following CPython versions:
+ 3.6, 3.7, 3.8 and 3.9 on both UNIX and Windows.
- The feature must not add unnecessary dependencies (where
"unnecessary" is of course subjective, but new dependencies should
@@ -69,10 +69,14 @@ We use `nox `__ to instrument our tests.
- To test your changes, run unit tests with ``nox``::
- $ nox -s unit-2.7
- $ nox -s unit-3.7
+ $ nox -s unit-3.8
$ ...
+- Args to pytest can be passed through the nox command separated by a `--`. For
+ example, to run a single test::
+
+ $ nox -s unit-3.8 -- -k
+
.. note::
The unit tests and system tests are described in the
@@ -93,8 +97,12 @@ On Debian/Ubuntu::
************
Coding Style
************
+- We use the automatic code formatter ``black``. You can run it using
+ the nox session ``blacken``. This will eliminate many lint errors. Run via::
+
+ $ nox -s blacken
-- PEP8 compliance, with exceptions defined in the linter configuration.
+- PEP8 compliance is required, with exceptions defined in the linter configuration.
If you have ``nox`` installed, you can test that you have not introduced
any non-compliant code via::
@@ -133,34 +141,23 @@ Running System Tests
- To run system tests, you can execute::
- $ nox -s system-3.7
- $ nox -s system-2.7
+ # Run all system tests
+ $ nox -s system-3.8
+
+ # Run a single system test
+ $ nox -s system-3.8 -- -k
+
.. note::
- System tests are only configured to run under Python 2.7 and
- Python 3.7. For expediency, we do not run them in older versions
- of Python 3.
+ System tests are only configured to run under Python 3.8.
+ For expediency, we do not run them in older versions of Python 3.
This alone will not run the tests. You'll need to change some local
auth settings and change some configuration in your project to
run all the tests.
-- System tests will be run against an actual project and
- so you'll need to provide some environment variables to facilitate
- authentication to your project:
-
- - ``GOOGLE_APPLICATION_CREDENTIALS``: The path to a JSON key file;
- Such a file can be downloaded directly from the developer's console by clicking
- "Generate new JSON key". See private key
- `docs `__
- for more details.
-
-- Once you have downloaded your json keys, set the environment variable
- ``GOOGLE_APPLICATION_CREDENTIALS`` to the absolute path of the json file::
-
- $ export GOOGLE_APPLICATION_CREDENTIALS="/Users//path/to/app_credentials.json"
-
+- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication `__. Some tests require a service account. For those tests see `Authenticating as a service account `__.
*************
Test Coverage
@@ -202,25 +199,24 @@ Supported Python Versions
We support:
-- `Python 3.5`_
- `Python 3.6`_
- `Python 3.7`_
- `Python 3.8`_
+- `Python 3.9`_
-.. _Python 3.5: https://docs.python.org/3.5/
.. _Python 3.6: https://docs.python.org/3.6/
.. _Python 3.7: https://docs.python.org/3.7/
.. _Python 3.8: https://docs.python.org/3.8/
+.. _Python 3.9: https://docs.python.org/3.9/
Supported versions can be found in our ``noxfile.py`` `config`_.
.. _config: https://github.com/googleapis/python-bigquery-connection/blob/master/noxfile.py
-Python 2.7 support is deprecated. All code changes should maintain Python 2.7 compatibility until January 1, 2020.
We also explicitly decided to support Python 3 beginning with version
-3.5. Reasons for this include:
+3.6. Reasons for this include:
- Encouraging use of newest versions of Python 3
- Taking the lead of `prominent`_ open-source `projects`_
diff --git a/LICENSE b/LICENSE
index a8ee855..d645695 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,6 +1,7 @@
- Apache License
+
+ Apache License
Version 2.0, January 2004
- https://www.apache.org/licenses/
+ http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
@@ -192,7 +193,7 @@
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
- https://www.apache.org/licenses/LICENSE-2.0
+ http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
diff --git a/MANIFEST.in b/MANIFEST.in
index e9e29d1..e783f4c 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -16,10 +16,10 @@
# Generated by synthtool. DO NOT EDIT!
include README.rst LICENSE
-recursive-include google *.json *.proto
+recursive-include google *.json *.proto py.typed
recursive-include tests *
global-exclude *.py[co]
global-exclude __pycache__
# Exclude scripts for samples readmegen
-prune scripts/readme-gen
\ No newline at end of file
+prune scripts/readme-gen
diff --git a/README.rst b/README.rst
index 8e5a074..3fb2804 100644
--- a/README.rst
+++ b/README.rst
@@ -1,7 +1,7 @@
Python Client for BigQuery Connection
=================================================
-|beta| |pypi| |versions|
+|GA| |pypi| |versions|
`BigQuery Connection API`_: Manage BigQuery connections to external data sources.
@@ -9,8 +9,8 @@ Python Client for BigQuery Connection
- `Product Documentation`_
- `Introduction to BigQuery external data sources`_
-.. |beta| image:: https://img.shields.io/badge/support-beta-orange.svg
- :target: https://github.com/googleapis/google-cloud-python/blob/master/README.rst#beta-support
+.. |GA| image:: https://img.shields.io/badge/support-ga-gold.svg
+ :target: https://github.com/googleapis/google-cloud-python/blob/master/README.rst#general-availability
.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-bigquery-connection.svg
:target: https://pypi.org/project/google-cloud-bigquery-connection/
.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-bigquery-connection.svg
diff --git a/SECURITY.md b/SECURITY.md
new file mode 100644
index 0000000..8b58ae9
--- /dev/null
+++ b/SECURITY.md
@@ -0,0 +1,7 @@
+# Security Policy
+
+To report a security issue, please use [g.co/vulnz](https://g.co/vulnz).
+
+The Google Security Team will respond within 5 working days of your report on g.co/vulnz.
+
+We use g.co/vulnz for intake, and coordinate disclosure here on GitHub using a Security Advisory to privately discuss and fix the issue.
diff --git a/docs/_static/custom.css b/docs/_static/custom.css
index 0abaf22..b0a2954 100644
--- a/docs/_static/custom.css
+++ b/docs/_static/custom.css
@@ -1,4 +1,20 @@
div#python2-eol {
border-color: red;
border-width: medium;
-}
\ No newline at end of file
+}
+
+/* Ensure minimum width for 'Parameters' / 'Returns' column */
+dl.field-list > dt {
+ min-width: 100px
+}
+
+/* Insert space between methods for readability */
+dl.method {
+ padding-top: 10px;
+ padding-bottom: 10px
+}
+
+/* Insert empty space between classes */
+dl.class {
+ padding-bottom: 50px
+}
diff --git a/docs/bigquery_connection_v1/connection_service.rst b/docs/bigquery_connection_v1/connection_service.rst
new file mode 100644
index 0000000..9059205
--- /dev/null
+++ b/docs/bigquery_connection_v1/connection_service.rst
@@ -0,0 +1,10 @@
+ConnectionService
+-----------------------------------
+
+.. automodule:: google.cloud.bigquery_connection_v1.services.connection_service
+ :members:
+ :inherited-members:
+
+.. automodule:: google.cloud.bigquery_connection_v1.services.connection_service.pagers
+ :members:
+ :inherited-members:
diff --git a/docs/bigquery_connection_v1/services.rst b/docs/bigquery_connection_v1/services.rst
index 8ae7bc5..317815a 100644
--- a/docs/bigquery_connection_v1/services.rst
+++ b/docs/bigquery_connection_v1/services.rst
@@ -1,6 +1,6 @@
Services for Google Cloud Bigquery Connection v1 API
====================================================
+.. toctree::
+ :maxdepth: 2
-.. automodule:: google.cloud.bigquery_connection_v1.services.connection_service
- :members:
- :inherited-members:
+ connection_service
diff --git a/docs/bigquery_connection_v1/types.rst b/docs/bigquery_connection_v1/types.rst
index 9c90aa6..76dbc70 100644
--- a/docs/bigquery_connection_v1/types.rst
+++ b/docs/bigquery_connection_v1/types.rst
@@ -3,4 +3,5 @@ Types for Google Cloud Bigquery Connection v1 API
.. automodule:: google.cloud.bigquery_connection_v1.types
:members:
+ :undoc-members:
:show-inheritance:
diff --git a/docs/conf.py b/docs/conf.py
index b396138..49ed01d 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -1,4 +1,17 @@
# -*- coding: utf-8 -*-
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
#
# google-cloud-bigquery-connection documentation build configuration file
#
@@ -67,9 +80,9 @@
master_doc = "index"
# General information about the project.
-project = u"google-cloud-bigquery-connection"
-copyright = u"2019, Google"
-author = u"Google APIs"
+project = "google-cloud-bigquery-connection"
+copyright = "2019, Google"
+author = "Google APIs"
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
@@ -268,7 +281,7 @@
(
master_doc,
"google-cloud-bigquery-connection.tex",
- u"google-cloud-bigquery-connection Documentation",
+ "google-cloud-bigquery-connection Documentation",
author,
"manual",
)
@@ -303,7 +316,7 @@
(
master_doc,
"google-cloud-bigquery-connection",
- u"google-cloud-bigquery-connection Documentation",
+ "google-cloud-bigquery-connection Documentation",
[author],
1,
)
@@ -322,7 +335,7 @@
(
master_doc,
"google-cloud-bigquery-connection",
- u"google-cloud-bigquery-connection Documentation",
+ "google-cloud-bigquery-connection Documentation",
author,
"google-cloud-bigquery-connection",
"google-cloud-bigquery-connection Library",
@@ -350,6 +363,7 @@
"google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,),
"grpc": ("https://grpc.github.io/grpc/python/", None),
"proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None),
+ "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None),
}
diff --git a/docs/multiprocessing.rst b/docs/multiprocessing.rst
index 1cb29d4..536d17b 100644
--- a/docs/multiprocessing.rst
+++ b/docs/multiprocessing.rst
@@ -1,7 +1,7 @@
.. note::
- Because this client uses :mod:`grpcio` library, it is safe to
+ Because this client uses :mod:`grpc` library, it is safe to
share instances across threads. In multiprocessing scenarios, the best
practice is to create client instances *after* the invocation of
- :func:`os.fork` by :class:`multiprocessing.Pool` or
+ :func:`os.fork` by :class:`multiprocessing.pool.Pool` or
:class:`multiprocessing.Process`.
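A minimal sketch of the pattern the note above recommends, assuming local default credentials and a placeholder project/location; the client is constructed inside the worker, i.e. after the fork performed by multiprocessing.pool.Pool:

    from multiprocessing import Pool

    from google.cloud import bigquery_connection_v1


    def list_connection_names(parent):
        # Create the client inside the worker process so each process
        # owns its own gRPC channel.
        client = bigquery_connection_v1.ConnectionServiceClient()
        return [conn.name for conn in client.list_connections(parent=parent)]


    if __name__ == "__main__":
        # Hypothetical resource name, for illustration only.
        parents = ["projects/my-project/locations/us"]
        with Pool(processes=2) as pool:
            print(pool.map(list_connection_names, parents))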
diff --git a/google/cloud/bigquery_connection/__init__.py b/google/cloud/bigquery_connection/__init__.py
index 591eaac..7e29fe8 100644
--- a/google/cloud/bigquery_connection/__init__.py
+++ b/google/cloud/bigquery_connection/__init__.py
@@ -1,5 +1,4 @@
# -*- coding: utf-8 -*-
-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -15,12 +14,13 @@
# limitations under the License.
#
-from google.cloud.bigquery_connection_v1.services.connection_service.async_client import (
- ConnectionServiceAsyncClient,
-)
from google.cloud.bigquery_connection_v1.services.connection_service.client import (
ConnectionServiceClient,
)
+from google.cloud.bigquery_connection_v1.services.connection_service.async_client import (
+ ConnectionServiceAsyncClient,
+)
+
from google.cloud.bigquery_connection_v1.types.connection import AwsCrossAccountRole
from google.cloud.bigquery_connection_v1.types.connection import AwsProperties
from google.cloud.bigquery_connection_v1.types.connection import CloudSqlCredential
@@ -34,13 +34,13 @@
from google.cloud.bigquery_connection_v1.types.connection import UpdateConnectionRequest
__all__ = (
+ "ConnectionServiceClient",
+ "ConnectionServiceAsyncClient",
"AwsCrossAccountRole",
"AwsProperties",
"CloudSqlCredential",
"CloudSqlProperties",
"Connection",
- "ConnectionServiceAsyncClient",
- "ConnectionServiceClient",
"CreateConnectionRequest",
"DeleteConnectionRequest",
"GetConnectionRequest",
diff --git a/google/cloud/bigquery_connection_v1/__init__.py b/google/cloud/bigquery_connection_v1/__init__.py
index c3ee8e4..bff96d6 100644
--- a/google/cloud/bigquery_connection_v1/__init__.py
+++ b/google/cloud/bigquery_connection_v1/__init__.py
@@ -1,5 +1,4 @@
# -*- coding: utf-8 -*-
-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,6 +15,8 @@
#
from .services.connection_service import ConnectionServiceClient
+from .services.connection_service import ConnectionServiceAsyncClient
+
from .types.connection import AwsCrossAccountRole
from .types.connection import AwsProperties
from .types.connection import CloudSqlCredential
@@ -28,18 +29,18 @@
from .types.connection import ListConnectionsResponse
from .types.connection import UpdateConnectionRequest
-
__all__ = (
+ "ConnectionServiceAsyncClient",
"AwsCrossAccountRole",
"AwsProperties",
"CloudSqlCredential",
"CloudSqlProperties",
"Connection",
+ "ConnectionServiceClient",
"CreateConnectionRequest",
"DeleteConnectionRequest",
"GetConnectionRequest",
"ListConnectionsRequest",
"ListConnectionsResponse",
"UpdateConnectionRequest",
- "ConnectionServiceClient",
)
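The __init__ reorderings above keep both the sync and async clients exported from their packages. A minimal usage sketch for the async client via the versionless namespace, assuming default credentials and a placeholder parent resource:

    import asyncio

    from google.cloud import bigquery_connection


    async def main():
        client = bigquery_connection.ConnectionServiceAsyncClient()
        # Hypothetical resource name, for illustration only.
        pager = await client.list_connections(parent="projects/my-project/locations/us")
        async for conn in pager:
            print(conn.name)


    asyncio.run(main())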
diff --git a/google/cloud/bigquery_connection_v1/gapic_metadata.json b/google/cloud/bigquery_connection_v1/gapic_metadata.json
new file mode 100644
index 0000000..c95fa6e
--- /dev/null
+++ b/google/cloud/bigquery_connection_v1/gapic_metadata.json
@@ -0,0 +1,103 @@
+ {
+ "comment": "This file maps proto services/RPCs to the corresponding library clients/methods",
+ "language": "python",
+ "libraryPackage": "google.cloud.bigquery_connection_v1",
+ "protoPackage": "google.cloud.bigquery.connection.v1",
+ "schema": "1.0",
+ "services": {
+ "ConnectionService": {
+ "clients": {
+ "grpc": {
+ "libraryClient": "ConnectionServiceClient",
+ "rpcs": {
+ "CreateConnection": {
+ "methods": [
+ "create_connection"
+ ]
+ },
+ "DeleteConnection": {
+ "methods": [
+ "delete_connection"
+ ]
+ },
+ "GetConnection": {
+ "methods": [
+ "get_connection"
+ ]
+ },
+ "GetIamPolicy": {
+ "methods": [
+ "get_iam_policy"
+ ]
+ },
+ "ListConnections": {
+ "methods": [
+ "list_connections"
+ ]
+ },
+ "SetIamPolicy": {
+ "methods": [
+ "set_iam_policy"
+ ]
+ },
+ "TestIamPermissions": {
+ "methods": [
+ "test_iam_permissions"
+ ]
+ },
+ "UpdateConnection": {
+ "methods": [
+ "update_connection"
+ ]
+ }
+ }
+ },
+ "grpc-async": {
+ "libraryClient": "ConnectionServiceAsyncClient",
+ "rpcs": {
+ "CreateConnection": {
+ "methods": [
+ "create_connection"
+ ]
+ },
+ "DeleteConnection": {
+ "methods": [
+ "delete_connection"
+ ]
+ },
+ "GetConnection": {
+ "methods": [
+ "get_connection"
+ ]
+ },
+ "GetIamPolicy": {
+ "methods": [
+ "get_iam_policy"
+ ]
+ },
+ "ListConnections": {
+ "methods": [
+ "list_connections"
+ ]
+ },
+ "SetIamPolicy": {
+ "methods": [
+ "set_iam_policy"
+ ]
+ },
+ "TestIamPermissions": {
+ "methods": [
+ "test_iam_permissions"
+ ]
+ },
+ "UpdateConnection": {
+ "methods": [
+ "update_connection"
+ ]
+ }
+ }
+ }
+ }
+ }
+ }
+}
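gapic_metadata.json maps each proto RPC to the generated client methods, keyed by transport. A minimal sketch of reading it, assuming the file is opened from the package path shown in this diff:

    import json

    with open("google/cloud/bigquery_connection_v1/gapic_metadata.json") as f:
        metadata = json.load(f)

    # Look up which grpc client method implements a given RPC.
    rpcs = metadata["services"]["ConnectionService"]["clients"]["grpc"]["rpcs"]
    print(rpcs["CreateConnection"]["methods"])  # ['create_connection']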
diff --git a/google/cloud/bigquery_connection_v1/services/__init__.py b/google/cloud/bigquery_connection_v1/services/__init__.py
index 42ffdf2..4de6597 100644
--- a/google/cloud/bigquery_connection_v1/services/__init__.py
+++ b/google/cloud/bigquery_connection_v1/services/__init__.py
@@ -1,5 +1,4 @@
# -*- coding: utf-8 -*-
-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/google/cloud/bigquery_connection_v1/services/connection_service/__init__.py b/google/cloud/bigquery_connection_v1/services/connection_service/__init__.py
index c7becdb..9dc1368 100644
--- a/google/cloud/bigquery_connection_v1/services/connection_service/__init__.py
+++ b/google/cloud/bigquery_connection_v1/services/connection_service/__init__.py
@@ -1,5 +1,4 @@
# -*- coding: utf-8 -*-
-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,7 +13,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-
from .client import ConnectionServiceClient
from .async_client import ConnectionServiceAsyncClient
diff --git a/google/cloud/bigquery_connection_v1/services/connection_service/async_client.py b/google/cloud/bigquery_connection_v1/services/connection_service/async_client.py
index 933c759..6a33f05 100644
--- a/google/cloud/bigquery_connection_v1/services/connection_service/async_client.py
+++ b/google/cloud/bigquery_connection_v1/services/connection_service/async_client.py
@@ -1,5 +1,4 @@
# -*- coding: utf-8 -*-
-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,7 +13,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-
from collections import OrderedDict
import functools
import re
@@ -22,19 +20,18 @@
import pkg_resources
import google.api_core.client_options as ClientOptions # type: ignore
-from google.api_core import exceptions # type: ignore
+from google.api_core import exceptions as core_exceptions # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
-from google.auth import credentials # type: ignore
+from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore
from google.cloud.bigquery_connection_v1.services.connection_service import pagers
from google.cloud.bigquery_connection_v1.types import connection
from google.cloud.bigquery_connection_v1.types import connection as gcbc_connection
-from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore
-from google.iam.v1 import policy_pb2 as policy # type: ignore
-from google.protobuf import field_mask_pb2 as field_mask # type: ignore
-
+from google.iam.v1 import iam_policy_pb2 # type: ignore
+from google.iam.v1 import policy_pb2 # type: ignore
+from google.protobuf import field_mask_pb2 # type: ignore
from .transports.base import ConnectionServiceTransport, DEFAULT_CLIENT_INFO
from .transports.grpc_asyncio import ConnectionServiceGrpcAsyncIOTransport
from .client import ConnectionServiceClient
@@ -50,42 +47,67 @@ class ConnectionServiceAsyncClient:
connection_path = staticmethod(ConnectionServiceClient.connection_path)
parse_connection_path = staticmethod(ConnectionServiceClient.parse_connection_path)
-
common_billing_account_path = staticmethod(
ConnectionServiceClient.common_billing_account_path
)
parse_common_billing_account_path = staticmethod(
ConnectionServiceClient.parse_common_billing_account_path
)
-
common_folder_path = staticmethod(ConnectionServiceClient.common_folder_path)
parse_common_folder_path = staticmethod(
ConnectionServiceClient.parse_common_folder_path
)
-
common_organization_path = staticmethod(
ConnectionServiceClient.common_organization_path
)
parse_common_organization_path = staticmethod(
ConnectionServiceClient.parse_common_organization_path
)
-
common_project_path = staticmethod(ConnectionServiceClient.common_project_path)
parse_common_project_path = staticmethod(
ConnectionServiceClient.parse_common_project_path
)
-
common_location_path = staticmethod(ConnectionServiceClient.common_location_path)
parse_common_location_path = staticmethod(
ConnectionServiceClient.parse_common_location_path
)
- from_service_account_file = ConnectionServiceClient.from_service_account_file
+ @classmethod
+ def from_service_account_info(cls, info: dict, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ info.
+
+ Args:
+ info (dict): The service account private key info.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ ConnectionServiceAsyncClient: The constructed client.
+ """
+ return ConnectionServiceClient.from_service_account_info.__func__(ConnectionServiceAsyncClient, info, *args, **kwargs) # type: ignore
+
+ @classmethod
+ def from_service_account_file(cls, filename: str, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ file.
+
+ Args:
+ filename (str): The path to the service account private key json
+ file.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ ConnectionServiceAsyncClient: The constructed client.
+ """
+ return ConnectionServiceClient.from_service_account_file.__func__(ConnectionServiceAsyncClient, filename, *args, **kwargs) # type: ignore
+
from_service_account_json = from_service_account_file
@property
def transport(self) -> ConnectionServiceTransport:
- """Return the transport used by the client instance.
+ """Returns the transport used by the client instance.
Returns:
ConnectionServiceTransport: The transport used by the client instance.
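The classmethods added above let the async client be built directly from service-account credentials, delegating to the sync client's implementations. A minimal usage sketch, assuming a hypothetical key file path:

    import json

    from google.cloud.bigquery_connection_v1.services.connection_service import (
        ConnectionServiceAsyncClient,
    )

    # Hypothetical key file path, for illustration only.
    client = ConnectionServiceAsyncClient.from_service_account_file("service-account.json")

    # A dict of already-parsed key material works the same way.
    with open("service-account.json") as f:
        info = json.load(f)
    client = ConnectionServiceAsyncClient.from_service_account_info(info)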
@@ -99,12 +121,12 @@ def transport(self) -> ConnectionServiceTransport:
def __init__(
self,
*,
- credentials: credentials.Credentials = None,
+ credentials: ga_credentials.Credentials = None,
transport: Union[str, ConnectionServiceTransport] = "grpc_asyncio",
client_options: ClientOptions = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
- """Instantiate the connection service client.
+ """Instantiates the connection service client.
Args:
credentials (Optional[google.auth.credentials.Credentials]): The
@@ -136,7 +158,6 @@ def __init__(
google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
creation failed for any reason.
"""
-
self._client = ConnectionServiceClient(
credentials=credentials,
transport=transport,
@@ -158,16 +179,17 @@ async def create_connection(
r"""Creates a new connection.
Args:
- request (:class:`~.gcbc_connection.CreateConnectionRequest`):
+ request (:class:`google.cloud.bigquery_connection_v1.types.CreateConnectionRequest`):
The request object. The request for
[ConnectionService.CreateConnection][google.cloud.bigquery.connection.v1.ConnectionService.CreateConnection].
parent (:class:`str`):
Required. Parent resource name. Must be in the format
``projects/{project_id}/locations/{location_id}``
+
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
- connection (:class:`~.gcbc_connection.Connection`):
+ connection (:class:`google.cloud.bigquery_connection_v1.types.Connection`):
Required. Connection to create.
This corresponds to the ``connection`` field
on the ``request`` instance; if ``request`` is provided, this
@@ -175,10 +197,10 @@ async def create_connection(
connection_id (:class:`str`):
Optional. Connection id that should
be assigned to the created connection.
+
This corresponds to the ``connection_id`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -186,7 +208,7 @@ async def create_connection(
sent along with the request as metadata.
Returns:
- ~.gcbc_connection.Connection:
+ google.cloud.bigquery_connection_v1.types.Connection:
Configuration parameters to establish
connection with an external data source,
except the credential attributes.
@@ -206,7 +228,6 @@ async def create_connection(
# If we have keyword arguments corresponding to fields on the
# request, apply these.
-
if parent is not None:
request.parent = parent
if connection is not None:
@@ -246,16 +267,16 @@ async def get_connection(
r"""Returns specified connection.
Args:
- request (:class:`~.connection.GetConnectionRequest`):
+ request (:class:`google.cloud.bigquery_connection_v1.types.GetConnectionRequest`):
The request object. The request for
[ConnectionService.GetConnection][google.cloud.bigquery.connection.v1.ConnectionService.GetConnection].
name (:class:`str`):
Required. Name of the requested connection, for example:
``projects/{project_id}/locations/{location_id}/connections/{connection_id}``
+
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -263,7 +284,7 @@ async def get_connection(
sent along with the request as metadata.
Returns:
- ~.connection.Connection:
+ google.cloud.bigquery_connection_v1.types.Connection:
Configuration parameters to establish
connection with an external data source,
except the credential attributes.
@@ -283,7 +304,6 @@ async def get_connection(
# If we have keyword arguments corresponding to fields on the
# request, apply these.
-
if name is not None:
request.name = name
@@ -296,8 +316,10 @@ async def get_connection(
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
- exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
+ core_exceptions.DeadlineExceeded,
+ core_exceptions.ServiceUnavailable,
),
+ deadline=60.0,
),
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
@@ -327,16 +349,16 @@ async def list_connections(
r"""Returns a list of connections in the given project.
Args:
- request (:class:`~.connection.ListConnectionsRequest`):
+ request (:class:`google.cloud.bigquery_connection_v1.types.ListConnectionsRequest`):
The request object. The request for
[ConnectionService.ListConnections][google.cloud.bigquery.connection.v1.ConnectionService.ListConnections].
parent (:class:`str`):
Required. Parent resource name. Must be in the form:
``projects/{project_id}/locations/{location_id}``
+
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -344,7 +366,7 @@ async def list_connections(
sent along with the request as metadata.
Returns:
- ~.pagers.ListConnectionsAsyncPager:
+ google.cloud.bigquery_connection_v1.services.connection_service.pagers.ListConnectionsAsyncPager:
The response for
[ConnectionService.ListConnections][google.cloud.bigquery.connection.v1.ConnectionService.ListConnections].
@@ -366,7 +388,6 @@ async def list_connections(
# If we have keyword arguments corresponding to fields on the
# request, apply these.
-
if parent is not None:
request.parent = parent
@@ -379,8 +400,10 @@ async def list_connections(
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
- exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
+ core_exceptions.DeadlineExceeded,
+ core_exceptions.ServiceUnavailable,
),
+ deadline=60.0,
),
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
@@ -410,7 +433,7 @@ async def update_connection(
*,
name: str = None,
connection: gcbc_connection.Connection = None,
- update_mask: field_mask.FieldMask = None,
+ update_mask: field_mask_pb2.FieldMask = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
@@ -420,28 +443,30 @@ async def update_connection(
are in the update field mask.
Args:
- request (:class:`~.gcbc_connection.UpdateConnectionRequest`):
+ request (:class:`google.cloud.bigquery_connection_v1.types.UpdateConnectionRequest`):
The request object. The request for
[ConnectionService.UpdateConnection][google.cloud.bigquery.connection.v1.ConnectionService.UpdateConnection].
name (:class:`str`):
Required. Name of the connection to update, for example:
``projects/{project_id}/locations/{location_id}/connections/{connection_id}``
+
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
- connection (:class:`~.gcbc_connection.Connection`):
+ connection (:class:`google.cloud.bigquery_connection_v1.types.Connection`):
Required. Connection containing the
updated fields.
+
This corresponds to the ``connection`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
- update_mask (:class:`~.field_mask.FieldMask`):
+ update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`):
Required. Update mask for the
connection fields to be updated.
+
This corresponds to the ``update_mask`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -449,7 +474,7 @@ async def update_connection(
sent along with the request as metadata.
Returns:
- ~.gcbc_connection.Connection:
+ google.cloud.bigquery_connection_v1.types.Connection:
Configuration parameters to establish
connection with an external data source,
except the credential attributes.
@@ -469,7 +494,6 @@ async def update_connection(
# If we have keyword arguments corresponding to fields on the
# request, apply these.
-
if name is not None:
request.name = name
if connection is not None:
@@ -509,16 +533,16 @@ async def delete_connection(
r"""Deletes connection and associated credential.
Args:
- request (:class:`~.connection.DeleteConnectionRequest`):
+ request (:class:`google.cloud.bigquery_connection_v1.types.DeleteConnectionRequest`):
The request object. The request for
[ConnectionService.DeleteConnectionRequest][].
name (:class:`str`):
Required. Name of the deleted connection, for example:
``projects/{project_id}/locations/{location_id}/connections/{connection_id}``
+
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -539,7 +563,6 @@ async def delete_connection(
# If we have keyword arguments corresponding to fields on the
# request, apply these.
-
if name is not None:
request.name = name
@@ -552,8 +575,10 @@ async def delete_connection(
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
- exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
+ core_exceptions.DeadlineExceeded,
+ core_exceptions.ServiceUnavailable,
),
+ deadline=60.0,
),
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
@@ -572,19 +597,19 @@ async def delete_connection(
async def get_iam_policy(
self,
- request: iam_policy.GetIamPolicyRequest = None,
+ request: iam_policy_pb2.GetIamPolicyRequest = None,
*,
resource: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
- ) -> policy.Policy:
+ ) -> policy_pb2.Policy:
r"""Gets the access control policy for a resource.
Returns an empty policy if the resource exists and does
not have a policy set.
Args:
- request (:class:`~.iam_policy.GetIamPolicyRequest`):
+ request (:class:`google.iam.v1.iam_policy_pb2.GetIamPolicyRequest`):
The request object. Request message for `GetIamPolicy`
method.
resource (:class:`str`):
@@ -592,10 +617,10 @@ async def get_iam_policy(
policy is being requested. See the
operation documentation for the
appropriate value for this field.
+
This corresponds to the ``resource`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -603,72 +628,62 @@ async def get_iam_policy(
sent along with the request as metadata.
Returns:
- ~.policy.Policy:
- Defines an Identity and Access Management (IAM) policy.
- It is used to specify access control policies for Cloud
- Platform resources.
-
- A ``Policy`` is a collection of ``bindings``. A
- ``binding`` binds one or more ``members`` to a single
- ``role``. Members can be user accounts, service
- accounts, Google groups, and domains (such as G Suite).
- A ``role`` is a named list of permissions (defined by
- IAM or configured by users). A ``binding`` can
- optionally specify a ``condition``, which is a logic
- expression that further constrains the role binding
- based on attributes about the request and/or target
- resource.
-
- **JSON Example**
-
- ::
-
- {
- "bindings": [
- {
- "role": "roles/resourcemanager.organizationAdmin",
- "members": [
- "user:mike@example.com",
- "group:admins@example.com",
- "domain:google.com",
- "serviceAccount:my-project-id@appspot.gserviceaccount.com"
- ]
- },
- {
- "role": "roles/resourcemanager.organizationViewer",
- "members": ["user:eve@example.com"],
- "condition": {
- "title": "expirable access",
- "description": "Does not grant access after Sep 2020",
- "expression": "request.time <
- timestamp('2020-10-01T00:00:00.000Z')",
- }
- }
- ]
- }
-
- **YAML Example**
-
- ::
-
- bindings:
- - members:
- - user:mike@example.com
- - group:admins@example.com
- - domain:google.com
- - serviceAccount:my-project-id@appspot.gserviceaccount.com
- role: roles/resourcemanager.organizationAdmin
- - members:
- - user:eve@example.com
- role: roles/resourcemanager.organizationViewer
- condition:
- title: expirable access
- description: Does not grant access after Sep 2020
- expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
-
- For a description of IAM and its features, see the `IAM
- developer's
- guide <https://cloud.google.com/iam/docs>`__.
+ google.iam.v1.policy_pb2.Policy:
+ Defines an Identity and Access Management (IAM) policy. It is used to
+ specify access control policies for Cloud Platform
+ resources.
+
+ A Policy is a collection of bindings. A binding binds
+ one or more members to a single role. Members can be
+ user accounts, service accounts, Google groups, and
+ domains (such as G Suite). A role is a named list of
+ permissions (defined by IAM or configured by users).
+ A binding can optionally specify a condition, which
+ is a logic expression that further constrains the
+ role binding based on attributes about the request
+ and/or target resource.
+
+ **JSON Example**
+
+ {
+ "bindings": [
+ {
+ "role":
+ "roles/resourcemanager.organizationAdmin",
+ "members": [ "user:mike@example.com",
+ "group:admins@example.com",
+ "domain:google.com",
+ "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+ ]
+
+ }, { "role":
+ "roles/resourcemanager.organizationViewer",
+ "members": ["user:eve@example.com"],
+ "condition": { "title": "expirable access",
+ "description": "Does not grant access after
+ Sep 2020", "expression": "request.time <
+ timestamp('2020-10-01T00:00:00.000Z')", } }
+
+ ]
+
+ }
+
+ **YAML Example**
+
+ bindings: - members: - user:\ mike@example.com -
+ group:\ admins@example.com - domain:google.com -
+ serviceAccount:\ my-project-id@appspot.gserviceaccount.com
+ role: roles/resourcemanager.organizationAdmin -
+ members: - user:\ eve@example.com role:
+ roles/resourcemanager.organizationViewer
+ condition: title: expirable access description:
+ Does not grant access after Sep 2020 expression:
+ request.time <
+ timestamp('2020-10-01T00:00:00.000Z')
+
+ For a description of IAM and its features, see the
+ [IAM developer's
+ guide](\ https://cloud.google.com/iam/docs).
"""
# Create or coerce a protobuf request object.
@@ -684,10 +699,9 @@ async def get_iam_policy(
# The request isn't a proto-plus wrapped type,
# so it must be constructed via keyword expansion.
if isinstance(request, dict):
- request = iam_policy.GetIamPolicyRequest(**request)
-
+ request = iam_policy_pb2.GetIamPolicyRequest(**request)
elif not request:
- request = iam_policy.GetIamPolicyRequest(resource=resource,)
+ request = iam_policy_pb2.GetIamPolicyRequest(resource=resource,)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
@@ -711,13 +725,13 @@ async def get_iam_policy(
async def set_iam_policy(
self,
- request: iam_policy.SetIamPolicyRequest = None,
+ request: iam_policy_pb2.SetIamPolicyRequest = None,
*,
resource: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
- ) -> policy.Policy:
+ ) -> policy_pb2.Policy:
r"""Sets the access control policy on the specified resource.
Replaces any existing policy.
@@ -725,7 +739,7 @@ async def set_iam_policy(
``PERMISSION_DENIED`` errors.
Args:
- request (:class:`~.iam_policy.SetIamPolicyRequest`):
+ request (:class:`google.iam.v1.iam_policy_pb2.SetIamPolicyRequest`):
The request object. Request message for `SetIamPolicy`
method.
resource (:class:`str`):
@@ -733,10 +747,10 @@ async def set_iam_policy(
policy is being specified. See the
operation documentation for the
appropriate value for this field.
+
This corresponds to the ``resource`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -744,72 +758,62 @@ async def set_iam_policy(
sent along with the request as metadata.
Returns:
- ~.policy.Policy:
- Defines an Identity and Access Management (IAM) policy.
- It is used to specify access control policies for Cloud
- Platform resources.
-
- A ``Policy`` is a collection of ``bindings``. A
- ``binding`` binds one or more ``members`` to a single
- ``role``. Members can be user accounts, service
- accounts, Google groups, and domains (such as G Suite).
- A ``role`` is a named list of permissions (defined by
- IAM or configured by users). A ``binding`` can
- optionally specify a ``condition``, which is a logic
- expression that further constrains the role binding
- based on attributes about the request and/or target
- resource.
-
- **JSON Example**
-
- ::
-
- {
- "bindings": [
- {
- "role": "roles/resourcemanager.organizationAdmin",
- "members": [
- "user:mike@example.com",
- "group:admins@example.com",
- "domain:google.com",
- "serviceAccount:my-project-id@appspot.gserviceaccount.com"
- ]
- },
- {
- "role": "roles/resourcemanager.organizationViewer",
- "members": ["user:eve@example.com"],
- "condition": {
- "title": "expirable access",
- "description": "Does not grant access after Sep 2020",
- "expression": "request.time <
- timestamp('2020-10-01T00:00:00.000Z')",
- }
- }
- ]
- }
-
- **YAML Example**
-
- ::
-
- bindings:
- - members:
- - user:mike@example.com
- - group:admins@example.com
- - domain:google.com
- - serviceAccount:my-project-id@appspot.gserviceaccount.com
- role: roles/resourcemanager.organizationAdmin
- - members:
- - user:eve@example.com
- role: roles/resourcemanager.organizationViewer
- condition:
- title: expirable access
- description: Does not grant access after Sep 2020
- expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
-
- For a description of IAM and its features, see the `IAM
- developer's
- guide <https://cloud.google.com/iam/docs>`__.
+ google.iam.v1.policy_pb2.Policy:
+ Defines an Identity and Access Management (IAM) policy. It is used to
+ specify access control policies for Cloud Platform
+ resources.
+
+ A Policy is a collection of bindings. A binding binds
+ one or more members to a single role. Members can be
+ user accounts, service accounts, Google groups, and
+ domains (such as G Suite). A role is a named list of
+ permissions (defined by IAM or configured by users).
+ A binding can optionally specify a condition, which
+ is a logic expression that further constrains the
+ role binding based on attributes about the request
+ and/or target resource.
+
+ **JSON Example**
+
+ {
+ "bindings": [
+ {
+ "role":
+ "roles/resourcemanager.organizationAdmin",
+ "members": [ "user:mike@example.com",
+ "group:admins@example.com",
+ "domain:google.com",
+ "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+ ]
+
+ }, { "role":
+ "roles/resourcemanager.organizationViewer",
+ "members": ["user:eve@example.com"],
+ "condition": { "title": "expirable access",
+ "description": "Does not grant access after
+ Sep 2020", "expression": "request.time <
+ timestamp('2020-10-01T00:00:00.000Z')", } }
+
+ ]
+
+ }
+
+ **YAML Example**
+
+ bindings: - members: - user:\ mike@example.com -
+ group:\ admins@example.com - domain:google.com -
+ serviceAccount:\ my-project-id@appspot.gserviceaccount.com
+ role: roles/resourcemanager.organizationAdmin -
+ members: - user:\ eve@example.com role:
+ roles/resourcemanager.organizationViewer
+ condition: title: expirable access description:
+ Does not grant access after Sep 2020 expression:
+ request.time <
+ timestamp('2020-10-01T00:00:00.000Z')
+
+ For a description of IAM and its features, see the
+ [IAM developer's
+ guide](\ https://cloud.google.com/iam/docs).
"""
# Create or coerce a protobuf request object.
@@ -825,10 +829,9 @@ async def set_iam_policy(
# The request isn't a proto-plus wrapped type,
# so it must be constructed via keyword expansion.
if isinstance(request, dict):
- request = iam_policy.SetIamPolicyRequest(**request)
-
+ request = iam_policy_pb2.SetIamPolicyRequest(**request)
elif not request:
- request = iam_policy.SetIamPolicyRequest(resource=resource,)
+ request = iam_policy_pb2.SetIamPolicyRequest(resource=resource,)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
@@ -852,14 +855,14 @@ async def set_iam_policy(
async def test_iam_permissions(
self,
- request: iam_policy.TestIamPermissionsRequest = None,
+ request: iam_policy_pb2.TestIamPermissionsRequest = None,
*,
resource: str = None,
permissions: Sequence[str] = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
- ) -> iam_policy.TestIamPermissionsResponse:
+ ) -> iam_policy_pb2.TestIamPermissionsResponse:
r"""Returns permissions that a caller has on the specified resource.
If the resource does not exist, this will return an empty set of
permissions, not a ``NOT_FOUND`` error.
@@ -870,7 +873,7 @@ async def test_iam_permissions(
warning.
Args:
- request (:class:`~.iam_policy.TestIamPermissionsRequest`):
+ request (:class:`google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest`):
The request object. Request message for
`TestIamPermissions` method.
resource (:class:`str`):
@@ -878,6 +881,7 @@ async def test_iam_permissions(
policy detail is being requested. See
the operation documentation for the
appropriate value for this field.
+
This corresponds to the ``resource`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -886,10 +890,10 @@ async def test_iam_permissions(
Permissions with wildcards (such as '*' or 'storage.*')
are not allowed. For more information see `IAM
Overview <https://cloud.google.com/iam/docs/overview#permissions>`__.
+
This corresponds to the ``permissions`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -897,8 +901,8 @@ async def test_iam_permissions(
sent along with the request as metadata.
Returns:
- ~.iam_policy.TestIamPermissionsResponse:
- Response message for ``TestIamPermissions`` method.
+ google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse:
+ Response message for TestIamPermissions method.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
@@ -913,10 +917,9 @@ async def test_iam_permissions(
# The request isn't a proto-plus wrapped type,
# so it must be constructed via keyword expansion.
if isinstance(request, dict):
- request = iam_policy.TestIamPermissionsRequest(**request)
-
+ request = iam_policy_pb2.TestIamPermissionsRequest(**request)
elif not request:
- request = iam_policy.TestIamPermissionsRequest(
+ request = iam_policy_pb2.TestIamPermissionsRequest(
resource=resource, permissions=permissions,
)
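
The async client hunks above switch the IAM surface from the aliased `iam_policy`/`policy` imports to the plain protobuf modules `iam_policy_pb2` and `policy_pb2`. A minimal usage sketch under that new surface follows; it assumes the package's usual top-level export of `ConnectionServiceAsyncClient`, application default credentials, and a placeholder connection resource name.

```python
# Minimal sketch (not part of the patch): calling get_iam_policy on the async
# client with a plain protobuf request, as the updated annotations expect.
import asyncio

from google.cloud.bigquery_connection_v1 import ConnectionServiceAsyncClient
from google.iam.v1 import iam_policy_pb2  # plain protobuf, not proto-plus


async def show_policy(resource: str) -> None:
    client = ConnectionServiceAsyncClient()
    # A dict would also be accepted and expanded into keyword arguments.
    request = iam_policy_pb2.GetIamPolicyRequest(resource=resource)
    policy = await client.get_iam_policy(request=request)
    for binding in policy.bindings:
        print(binding.role, list(binding.members))


# asyncio.run(show_policy(
#     "projects/my-project/locations/us/connections/my-connection"))
```
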
diff --git a/google/cloud/bigquery_connection_v1/services/connection_service/client.py b/google/cloud/bigquery_connection_v1/services/connection_service/client.py
index c25aef2..1e06e36 100644
--- a/google/cloud/bigquery_connection_v1/services/connection_service/client.py
+++ b/google/cloud/bigquery_connection_v1/services/connection_service/client.py
@@ -1,5 +1,4 @@
# -*- coding: utf-8 -*-
-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,7 +13,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-
from collections import OrderedDict
from distutils import util
import os
@@ -23,10 +21,10 @@
import pkg_resources
from google.api_core import client_options as client_options_lib # type: ignore
-from google.api_core import exceptions # type: ignore
+from google.api_core import exceptions as core_exceptions # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
-from google.auth import credentials # type: ignore
+from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport import mtls # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
from google.auth.exceptions import MutualTLSChannelError # type: ignore
@@ -35,10 +33,9 @@
from google.cloud.bigquery_connection_v1.services.connection_service import pagers
from google.cloud.bigquery_connection_v1.types import connection
from google.cloud.bigquery_connection_v1.types import connection as gcbc_connection
-from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore
-from google.iam.v1 import policy_pb2 as policy # type: ignore
-from google.protobuf import field_mask_pb2 as field_mask # type: ignore
-
+from google.iam.v1 import iam_policy_pb2 # type: ignore
+from google.iam.v1 import policy_pb2 # type: ignore
+from google.protobuf import field_mask_pb2 # type: ignore
from .transports.base import ConnectionServiceTransport, DEFAULT_CLIENT_INFO
from .transports.grpc import ConnectionServiceGrpcTransport
from .transports.grpc_asyncio import ConnectionServiceGrpcAsyncIOTransport
@@ -61,7 +58,7 @@ class ConnectionServiceClientMeta(type):
def get_transport_class(
cls, label: str = None,
) -> Type[ConnectionServiceTransport]:
- """Return an appropriate transport class.
+ """Returns an appropriate transport class.
Args:
label: The name of the desired transport. If none is
@@ -84,7 +81,8 @@ class ConnectionServiceClient(metaclass=ConnectionServiceClientMeta):
@staticmethod
def _get_default_mtls_endpoint(api_endpoint):
- """Convert api endpoint to mTLS endpoint.
+ """Converts api endpoint to mTLS endpoint.
+
Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
"*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
Args:
@@ -116,10 +114,27 @@ def _get_default_mtls_endpoint(api_endpoint):
DEFAULT_ENDPOINT
)
+ @classmethod
+ def from_service_account_info(cls, info: dict, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ info.
+
+ Args:
+ info (dict): The service account private key info.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ ConnectionServiceClient: The constructed client.
+ """
+ credentials = service_account.Credentials.from_service_account_info(info)
+ kwargs["credentials"] = credentials
+ return cls(*args, **kwargs)
+
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
- file.
+ file.
Args:
filename (str): The path to the service account private key json
@@ -128,7 +143,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs):
kwargs: Additional arguments to pass to the constructor.
Returns:
- {@api.name}: The constructed client.
+ ConnectionServiceClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_file(filename)
kwargs["credentials"] = credentials
@@ -138,23 +153,24 @@ def from_service_account_file(cls, filename: str, *args, **kwargs):
@property
def transport(self) -> ConnectionServiceTransport:
- """Return the transport used by the client instance.
+ """Returns the transport used by the client instance.
Returns:
- ConnectionServiceTransport: The transport used by the client instance.
+ ConnectionServiceTransport: The transport used by the client
+ instance.
"""
return self._transport
@staticmethod
def connection_path(project: str, location: str, connection: str,) -> str:
- """Return a fully-qualified connection string."""
+ """Returns a fully-qualified connection string."""
return "projects/{project}/locations/{location}/connections/{connection}".format(
project=project, location=location, connection=connection,
)
@staticmethod
def parse_connection_path(path: str) -> Dict[str, str]:
- """Parse a connection path into its component segments."""
+ """Parses a connection path into its component segments."""
m = re.match(
r"^projects/(?P.+?)/locations/(?P.+?)/connections/(?P.+?)$",
path,
@@ -163,7 +179,7 @@ def parse_connection_path(path: str) -> Dict[str, str]:
@staticmethod
def common_billing_account_path(billing_account: str,) -> str:
- """Return a fully-qualified billing_account string."""
+ """Returns a fully-qualified billing_account string."""
return "billingAccounts/{billing_account}".format(
billing_account=billing_account,
)
@@ -176,7 +192,7 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]:
@staticmethod
def common_folder_path(folder: str,) -> str:
- """Return a fully-qualified folder string."""
+ """Returns a fully-qualified folder string."""
return "folders/{folder}".format(folder=folder,)
@staticmethod
@@ -187,7 +203,7 @@ def parse_common_folder_path(path: str) -> Dict[str, str]:
@staticmethod
def common_organization_path(organization: str,) -> str:
- """Return a fully-qualified organization string."""
+ """Returns a fully-qualified organization string."""
return "organizations/{organization}".format(organization=organization,)
@staticmethod
@@ -198,7 +214,7 @@ def parse_common_organization_path(path: str) -> Dict[str, str]:
@staticmethod
def common_project_path(project: str,) -> str:
- """Return a fully-qualified project string."""
+ """Returns a fully-qualified project string."""
return "projects/{project}".format(project=project,)
@staticmethod
@@ -209,7 +225,7 @@ def parse_common_project_path(path: str) -> Dict[str, str]:
@staticmethod
def common_location_path(project: str, location: str,) -> str:
- """Return a fully-qualified location string."""
+ """Returns a fully-qualified location string."""
return "projects/{project}/locations/{location}".format(
project=project, location=location,
)
@@ -223,12 +239,12 @@ def parse_common_location_path(path: str) -> Dict[str, str]:
def __init__(
self,
*,
- credentials: Optional[credentials.Credentials] = None,
+ credentials: Optional[ga_credentials.Credentials] = None,
transport: Union[str, ConnectionServiceTransport, None] = None,
client_options: Optional[client_options_lib.ClientOptions] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
- """Instantiate the connection service client.
+ """Instantiates the connection service client.
Args:
credentials (Optional[google.auth.credentials.Credentials]): The
@@ -236,10 +252,10 @@ def __init__(
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
- transport (Union[str, ~.ConnectionServiceTransport]): The
+ transport (Union[str, ConnectionServiceTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
- client_options (client_options_lib.ClientOptions): Custom options for the
+ client_options (google.api_core.client_options.ClientOptions): Custom options for the
client. It won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
@@ -275,21 +291,18 @@ def __init__(
util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))
)
- ssl_credentials = None
+ client_cert_source_func = None
is_mtls = False
if use_client_cert:
if client_options.client_cert_source:
- import grpc # type: ignore
-
- cert, key = client_options.client_cert_source()
- ssl_credentials = grpc.ssl_channel_credentials(
- certificate_chain=cert, private_key=key
- )
is_mtls = True
+ client_cert_source_func = client_options.client_cert_source
else:
- creds = SslCredentials()
- is_mtls = creds.is_mtls
- ssl_credentials = creds.ssl_credentials if is_mtls else None
+ is_mtls = mtls.has_default_client_cert_source()
+ if is_mtls:
+ client_cert_source_func = mtls.default_client_cert_source()
+ else:
+ client_cert_source_func = None
# Figure out which api endpoint to use.
if client_options.api_endpoint is not None:
@@ -301,12 +314,14 @@ def __init__(
elif use_mtls_env == "always":
api_endpoint = self.DEFAULT_MTLS_ENDPOINT
elif use_mtls_env == "auto":
- api_endpoint = (
- self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT
- )
+ if is_mtls:
+ api_endpoint = self.DEFAULT_MTLS_ENDPOINT
+ else:
+ api_endpoint = self.DEFAULT_ENDPOINT
else:
raise MutualTLSChannelError(
- "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always"
+ "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted "
+ "values: never, auto, always"
)
# Save or instantiate the transport.
@@ -321,8 +336,8 @@ def __init__(
)
if client_options.scopes:
raise ValueError(
- "When providing a transport instance, "
- "provide its scopes directly."
+ "When providing a transport instance, provide its scopes "
+ "directly."
)
self._transport = transport
else:
@@ -332,7 +347,7 @@ def __init__(
credentials_file=client_options.credentials_file,
host=api_endpoint,
scopes=client_options.scopes,
- ssl_channel_credentials=ssl_credentials,
+ client_cert_source_for_mtls=client_cert_source_func,
quota_project_id=client_options.quota_project_id,
client_info=client_info,
)
@@ -351,27 +366,28 @@ def create_connection(
r"""Creates a new connection.
Args:
- request (:class:`~.gcbc_connection.CreateConnectionRequest`):
+ request (google.cloud.bigquery_connection_v1.types.CreateConnectionRequest):
The request object. The request for
[ConnectionService.CreateConnection][google.cloud.bigquery.connection.v1.ConnectionService.CreateConnection].
- parent (:class:`str`):
+ parent (str):
Required. Parent resource name. Must be in the format
``projects/{project_id}/locations/{location_id}``
+
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
- connection (:class:`~.gcbc_connection.Connection`):
+ connection (google.cloud.bigquery_connection_v1.types.Connection):
Required. Connection to create.
This corresponds to the ``connection`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
- connection_id (:class:`str`):
+ connection_id (str):
Optional. Connection id that should
be assigned to the created connection.
+
This corresponds to the ``connection_id`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -379,7 +395,7 @@ def create_connection(
sent along with the request as metadata.
Returns:
- ~.gcbc_connection.Connection:
+ google.cloud.bigquery_connection_v1.types.Connection:
Configuration parameters to establish
connection with an external data source,
except the credential attributes.
@@ -401,10 +417,8 @@ def create_connection(
# there are no flattened fields.
if not isinstance(request, gcbc_connection.CreateConnectionRequest):
request = gcbc_connection.CreateConnectionRequest(request)
-
# If we have keyword arguments corresponding to fields on the
# request, apply these.
-
if parent is not None:
request.parent = parent
if connection is not None:
@@ -440,16 +454,16 @@ def get_connection(
r"""Returns specified connection.
Args:
- request (:class:`~.connection.GetConnectionRequest`):
+ request (google.cloud.bigquery_connection_v1.types.GetConnectionRequest):
The request object. The request for
[ConnectionService.GetConnection][google.cloud.bigquery.connection.v1.ConnectionService.GetConnection].
- name (:class:`str`):
+ name (str):
Required. Name of the requested connection, for example:
``projects/{project_id}/locations/{location_id}/connections/{connection_id}``
+
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -457,7 +471,7 @@ def get_connection(
sent along with the request as metadata.
Returns:
- ~.connection.Connection:
+ google.cloud.bigquery_connection_v1.types.Connection:
Configuration parameters to establish
connection with an external data source,
except the credential attributes.
@@ -479,10 +493,8 @@ def get_connection(
# there are no flattened fields.
if not isinstance(request, connection.GetConnectionRequest):
request = connection.GetConnectionRequest(request)
-
# If we have keyword arguments corresponding to fields on the
# request, apply these.
-
if name is not None:
request.name = name
@@ -514,16 +526,16 @@ def list_connections(
r"""Returns a list of connections in the given project.
Args:
- request (:class:`~.connection.ListConnectionsRequest`):
+ request (google.cloud.bigquery_connection_v1.types.ListConnectionsRequest):
The request object. The request for
[ConnectionService.ListConnections][google.cloud.bigquery.connection.v1.ConnectionService.ListConnections].
- parent (:class:`str`):
+ parent (str):
Required. Parent resource name. Must be in the form:
``projects/{project_id}/locations/{location_id}``
+
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -531,7 +543,7 @@ def list_connections(
sent along with the request as metadata.
Returns:
- ~.pagers.ListConnectionsPager:
+ google.cloud.bigquery_connection_v1.services.connection_service.pagers.ListConnectionsPager:
The response for
[ConnectionService.ListConnections][google.cloud.bigquery.connection.v1.ConnectionService.ListConnections].
@@ -555,10 +567,8 @@ def list_connections(
# there are no flattened fields.
if not isinstance(request, connection.ListConnectionsRequest):
request = connection.ListConnectionsRequest(request)
-
# If we have keyword arguments corresponding to fields on the
# request, apply these.
-
if parent is not None:
request.parent = parent
@@ -590,7 +600,7 @@ def update_connection(
*,
name: str = None,
connection: gcbc_connection.Connection = None,
- update_mask: field_mask.FieldMask = None,
+ update_mask: field_mask_pb2.FieldMask = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
@@ -600,28 +610,30 @@ def update_connection(
are in the update field mask.
Args:
- request (:class:`~.gcbc_connection.UpdateConnectionRequest`):
+ request (google.cloud.bigquery_connection_v1.types.UpdateConnectionRequest):
The request object. The request for
[ConnectionService.UpdateConnection][google.cloud.bigquery.connection.v1.ConnectionService.UpdateConnection].
- name (:class:`str`):
+ name (str):
Required. Name of the connection to update, for example:
``projects/{project_id}/locations/{location_id}/connections/{connection_id}``
+
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
- connection (:class:`~.gcbc_connection.Connection`):
+ connection (google.cloud.bigquery_connection_v1.types.Connection):
Required. Connection containing the
updated fields.
+
This corresponds to the ``connection`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
- update_mask (:class:`~.field_mask.FieldMask`):
+ update_mask (google.protobuf.field_mask_pb2.FieldMask):
Required. Update mask for the
connection fields to be updated.
+
This corresponds to the ``update_mask`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -629,7 +641,7 @@ def update_connection(
sent along with the request as metadata.
Returns:
- ~.gcbc_connection.Connection:
+ google.cloud.bigquery_connection_v1.types.Connection:
Configuration parameters to establish
connection with an external data source,
except the credential attributes.
@@ -651,10 +663,8 @@ def update_connection(
# there are no flattened fields.
if not isinstance(request, gcbc_connection.UpdateConnectionRequest):
request = gcbc_connection.UpdateConnectionRequest(request)
-
# If we have keyword arguments corresponding to fields on the
# request, apply these.
-
if name is not None:
request.name = name
if connection is not None:
@@ -690,16 +700,16 @@ def delete_connection(
r"""Deletes connection and associated credential.
Args:
- request (:class:`~.connection.DeleteConnectionRequest`):
+ request (google.cloud.bigquery_connection_v1.types.DeleteConnectionRequest):
The request object. The request for
[ConnectionService.DeleteConnectionRequest][].
- name (:class:`str`):
+ name (str):
Required. Name of the deleted connection, for example:
``projects/{project_id}/locations/{location_id}/connections/{connection_id}``
+
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -722,10 +732,8 @@ def delete_connection(
# there are no flattened fields.
if not isinstance(request, connection.DeleteConnectionRequest):
request = connection.DeleteConnectionRequest(request)
-
# If we have keyword arguments corresponding to fields on the
# request, apply these.
-
if name is not None:
request.name = name
@@ -746,30 +754,30 @@ def delete_connection(
def get_iam_policy(
self,
- request: iam_policy.GetIamPolicyRequest = None,
+ request: iam_policy_pb2.GetIamPolicyRequest = None,
*,
resource: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
- ) -> policy.Policy:
+ ) -> policy_pb2.Policy:
r"""Gets the access control policy for a resource.
Returns an empty policy if the resource exists and does
not have a policy set.
Args:
- request (:class:`~.iam_policy.GetIamPolicyRequest`):
+ request (google.iam.v1.iam_policy_pb2.GetIamPolicyRequest):
The request object. Request message for `GetIamPolicy`
method.
- resource (:class:`str`):
+ resource (str):
REQUIRED: The resource for which the
policy is being requested. See the
operation documentation for the
appropriate value for this field.
+
This corresponds to the ``resource`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -777,72 +785,62 @@ def get_iam_policy(
sent along with the request as metadata.
Returns:
- ~.policy.Policy:
- Defines an Identity and Access Management (IAM) policy.
- It is used to specify access control policies for Cloud
- Platform resources.
-
- A ``Policy`` is a collection of ``bindings``. A
- ``binding`` binds one or more ``members`` to a single
- ``role``. Members can be user accounts, service
- accounts, Google groups, and domains (such as G Suite).
- A ``role`` is a named list of permissions (defined by
- IAM or configured by users). A ``binding`` can
- optionally specify a ``condition``, which is a logic
- expression that further constrains the role binding
- based on attributes about the request and/or target
- resource.
-
- **JSON Example**
-
- ::
-
- {
- "bindings": [
- {
- "role": "roles/resourcemanager.organizationAdmin",
- "members": [
- "user:mike@example.com",
- "group:admins@example.com",
- "domain:google.com",
- "serviceAccount:my-project-id@appspot.gserviceaccount.com"
- ]
- },
- {
- "role": "roles/resourcemanager.organizationViewer",
- "members": ["user:eve@example.com"],
- "condition": {
- "title": "expirable access",
- "description": "Does not grant access after Sep 2020",
- "expression": "request.time <
- timestamp('2020-10-01T00:00:00.000Z')",
- }
- }
- ]
- }
-
- **YAML Example**
-
- ::
-
- bindings:
- - members:
- - user:mike@example.com
- - group:admins@example.com
- - domain:google.com
- - serviceAccount:my-project-id@appspot.gserviceaccount.com
- role: roles/resourcemanager.organizationAdmin
- - members:
- - user:eve@example.com
- role: roles/resourcemanager.organizationViewer
- condition:
- title: expirable access
- description: Does not grant access after Sep 2020
- expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
-
- For a description of IAM and its features, see the `IAM
- developer's
- guide <https://cloud.google.com/iam/docs>`__.
+ google.iam.v1.policy_pb2.Policy:
+ Defines an Identity and Access Management (IAM) policy. It is used to
+ specify access control policies for Cloud Platform
+ resources.
+
+ A Policy is a collection of bindings. A binding binds
+ one or more members to a single role. Members can be
+ user accounts, service accounts, Google groups, and
+ domains (such as G Suite). A role is a named list of
+ permissions (defined by IAM or configured by users).
+ A binding can optionally specify a condition, which
+ is a logic expression that further constrains the
+ role binding based on attributes about the request
+ and/or target resource.
+
+ **JSON Example**
+
+ {
+ "bindings": [
+ {
+ "role":
+ "roles/resourcemanager.organizationAdmin",
+ "members": [ "user:mike@example.com",
+ "group:admins@example.com",
+ "domain:google.com",
+ "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+ ]
+
+ }, { "role":
+ "roles/resourcemanager.organizationViewer",
+ "members": ["user:eve@example.com"],
+ "condition": { "title": "expirable access",
+ "description": "Does not grant access after
+ Sep 2020", "expression": "request.time <
+ timestamp('2020-10-01T00:00:00.000Z')", } }
+
+ ]
+
+ }
+
+ **YAML Example**
+
+ bindings: - members: - user:\ mike@example.com -
+ group:\ admins@example.com - domain:google.com -
+ serviceAccount:\ my-project-id@appspot.gserviceaccount.com
+ role: roles/resourcemanager.organizationAdmin -
+ members: - user:\ eve@example.com role:
+ roles/resourcemanager.organizationViewer
+ condition: title: expirable access description:
+ Does not grant access after Sep 2020 expression:
+ request.time <
+ timestamp('2020-10-01T00:00:00.000Z')
+
+ For a description of IAM and its features, see the
+ [IAM developer's
+ guide](\ https://cloud.google.com/iam/docs).
"""
# Create or coerce a protobuf request object.
@@ -855,13 +853,15 @@ def get_iam_policy(
"the individual field arguments should be set."
)
- # The request isn't a proto-plus wrapped type,
- # so it must be constructed via keyword expansion.
if isinstance(request, dict):
- request = iam_policy.GetIamPolicyRequest(**request)
-
+ # The request isn't a proto-plus wrapped type,
+ # so it must be constructed via keyword expansion.
+ request = iam_policy_pb2.GetIamPolicyRequest(**request)
elif not request:
- request = iam_policy.GetIamPolicyRequest(resource=resource,)
+ # Null request, just make one.
+ request = iam_policy_pb2.GetIamPolicyRequest()
+ if resource is not None:
+ request.resource = resource
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
@@ -881,13 +881,13 @@ def get_iam_policy(
def set_iam_policy(
self,
- request: iam_policy.SetIamPolicyRequest = None,
+ request: iam_policy_pb2.SetIamPolicyRequest = None,
*,
resource: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
- ) -> policy.Policy:
+ ) -> policy_pb2.Policy:
r"""Sets the access control policy on the specified resource.
Replaces any existing policy.
@@ -895,18 +895,18 @@ def set_iam_policy(
``PERMISSION_DENIED`` errors.
Args:
- request (:class:`~.iam_policy.SetIamPolicyRequest`):
+ request (google.iam.v1.iam_policy_pb2.SetIamPolicyRequest):
The request object. Request message for `SetIamPolicy`
method.
- resource (:class:`str`):
+ resource (str):
REQUIRED: The resource for which the
policy is being specified. See the
operation documentation for the
appropriate value for this field.
+
This corresponds to the ``resource`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -914,72 +914,62 @@ def set_iam_policy(
sent along with the request as metadata.
Returns:
- ~.policy.Policy:
- Defines an Identity and Access Management (IAM) policy.
- It is used to specify access control policies for Cloud
- Platform resources.
-
- A ``Policy`` is a collection of ``bindings``. A
- ``binding`` binds one or more ``members`` to a single
- ``role``. Members can be user accounts, service
- accounts, Google groups, and domains (such as G Suite).
- A ``role`` is a named list of permissions (defined by
- IAM or configured by users). A ``binding`` can
- optionally specify a ``condition``, which is a logic
- expression that further constrains the role binding
- based on attributes about the request and/or target
- resource.
-
- **JSON Example**
-
- ::
-
- {
- "bindings": [
- {
- "role": "roles/resourcemanager.organizationAdmin",
- "members": [
- "user:mike@example.com",
- "group:admins@example.com",
- "domain:google.com",
- "serviceAccount:my-project-id@appspot.gserviceaccount.com"
- ]
- },
- {
- "role": "roles/resourcemanager.organizationViewer",
- "members": ["user:eve@example.com"],
- "condition": {
- "title": "expirable access",
- "description": "Does not grant access after Sep 2020",
- "expression": "request.time <
- timestamp('2020-10-01T00:00:00.000Z')",
- }
- }
- ]
- }
-
- **YAML Example**
-
- ::
-
- bindings:
- - members:
- - user:mike@example.com
- - group:admins@example.com
- - domain:google.com
- - serviceAccount:my-project-id@appspot.gserviceaccount.com
- role: roles/resourcemanager.organizationAdmin
- - members:
- - user:eve@example.com
- role: roles/resourcemanager.organizationViewer
- condition:
- title: expirable access
- description: Does not grant access after Sep 2020
- expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
-
- For a description of IAM and its features, see the `IAM
- developer's
- guide <https://cloud.google.com/iam/docs>`__.
+ google.iam.v1.policy_pb2.Policy:
+ Defines an Identity and Access Management (IAM) policy. It is used to
+ specify access control policies for Cloud Platform
+ resources.
+
+ A Policy is a collection of bindings. A binding binds
+ one or more members to a single role. Members can be
+ user accounts, service accounts, Google groups, and
+ domains (such as G Suite). A role is a named list of
+ permissions (defined by IAM or configured by users).
+ A binding can optionally specify a condition, which
+ is a logic expression that further constrains the
+ role binding based on attributes about the request
+ and/or target resource.
+
+ **JSON Example**
+
+ {
+ "bindings": [
+ {
+ "role":
+ "roles/resourcemanager.organizationAdmin",
+ "members": [ "user:mike@example.com",
+ "group:admins@example.com",
+ "domain:google.com",
+ "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+ ]
+
+ }, { "role":
+ "roles/resourcemanager.organizationViewer",
+ "members": ["user:eve@example.com"],
+ "condition": { "title": "expirable access",
+ "description": "Does not grant access after
+ Sep 2020", "expression": "request.time <
+ timestamp('2020-10-01T00:00:00.000Z')", } }
+
+ ]
+
+ }
+
+ **YAML Example**
+
+ bindings: - members: - user:\ mike@example.com -
+ group:\ admins@example.com - domain:google.com -
+ serviceAccount:\ my-project-id@appspot.gserviceaccount.com
+ role: roles/resourcemanager.organizationAdmin -
+ members: - user:\ eve@example.com role:
+ roles/resourcemanager.organizationViewer
+ condition: title: expirable access description:
+ Does not grant access after Sep 2020 expression:
+ request.time <
+ timestamp('2020-10-01T00:00:00.000Z')
+
+ For a description of IAM and its features, see the
+ [IAM developer's
+ guide](\ https://cloud.google.com/iam/docs).
"""
# Create or coerce a protobuf request object.
@@ -992,13 +982,15 @@ def set_iam_policy(
"the individual field arguments should be set."
)
- # The request isn't a proto-plus wrapped type,
- # so it must be constructed via keyword expansion.
if isinstance(request, dict):
- request = iam_policy.SetIamPolicyRequest(**request)
-
+ # The request isn't a proto-plus wrapped type,
+ # so it must be constructed via keyword expansion.
+ request = iam_policy_pb2.SetIamPolicyRequest(**request)
elif not request:
- request = iam_policy.SetIamPolicyRequest(resource=resource,)
+ # Null request, just make one.
+ request = iam_policy_pb2.SetIamPolicyRequest()
+ if resource is not None:
+ request.resource = resource
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
@@ -1018,14 +1010,14 @@ def set_iam_policy(
def test_iam_permissions(
self,
- request: iam_policy.TestIamPermissionsRequest = None,
+ request: iam_policy_pb2.TestIamPermissionsRequest = None,
*,
resource: str = None,
permissions: Sequence[str] = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
- ) -> iam_policy.TestIamPermissionsResponse:
+ ) -> iam_policy_pb2.TestIamPermissionsResponse:
r"""Returns permissions that a caller has on the specified resource.
If the resource does not exist, this will return an empty set of
permissions, not a ``NOT_FOUND`` error.
@@ -1036,26 +1028,27 @@ def test_iam_permissions(
warning.
Args:
- request (:class:`~.iam_policy.TestIamPermissionsRequest`):
+ request (google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest):
The request object. Request message for
`TestIamPermissions` method.
- resource (:class:`str`):
+ resource (str):
REQUIRED: The resource for which the
policy detail is being requested. See
the operation documentation for the
appropriate value for this field.
+
This corresponds to the ``resource`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
- permissions (:class:`Sequence[str]`):
+ permissions (Sequence[str]):
The set of permissions to check for the ``resource``.
Permissions with wildcards (such as '*' or 'storage.*')
are not allowed. For more information see `IAM
Overview <https://cloud.google.com/iam/docs/overview#permissions>`__.
+
This corresponds to the ``permissions`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -1063,8 +1056,8 @@ def test_iam_permissions(
sent along with the request as metadata.
Returns:
- ~.iam_policy.TestIamPermissionsResponse:
- Response message for ``TestIamPermissions`` method.
+ google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse:
+ Response message for TestIamPermissions method.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
@@ -1076,15 +1069,17 @@ def test_iam_permissions(
"the individual field arguments should be set."
)
- # The request isn't a proto-plus wrapped type,
- # so it must be constructed via keyword expansion.
if isinstance(request, dict):
- request = iam_policy.TestIamPermissionsRequest(**request)
-
+ # The request isn't a proto-plus wrapped type,
+ # so it must be constructed via keyword expansion.
+ request = iam_policy_pb2.TestIamPermissionsRequest(**request)
elif not request:
- request = iam_policy.TestIamPermissionsRequest(
- resource=resource, permissions=permissions,
- )
+ # Null request, just make one.
+ request = iam_policy_pb2.TestIamPermissionsRequest()
+ if resource is not None:
+ request.resource = resource
+ if permissions:
+ request.permissions.extend(permissions)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
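
For the synchronous client, the two visible behavioral additions are the new `from_service_account_info` constructor and the field-by-field population of an empty `iam_policy_pb2` request when only flattened arguments are passed. A hedged end-to-end sketch combining both follows; the key path, project, and connection names are placeholders.

```python
# Illustrative sketch, not part of the patch: construct the client from
# in-memory service account info, then use the flattened `resource` argument.
import json

from google.cloud.bigquery_connection_v1 import ConnectionServiceClient

with open("key.json") as fp:  # placeholder path to a service account key
    info = json.load(fp)

client = ConnectionServiceClient.from_service_account_info(info)

policy = client.get_iam_policy(
    resource="projects/my-project/locations/us/connections/my-connection",
)
print(len(policy.bindings))
```
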
diff --git a/google/cloud/bigquery_connection_v1/services/connection_service/pagers.py b/google/cloud/bigquery_connection_v1/services/connection_service/pagers.py
index b9537fc..9706379 100644
--- a/google/cloud/bigquery_connection_v1/services/connection_service/pagers.py
+++ b/google/cloud/bigquery_connection_v1/services/connection_service/pagers.py
@@ -1,5 +1,4 @@
# -*- coding: utf-8 -*-
-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,8 +13,16 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-
-from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple
+from typing import (
+ Any,
+ AsyncIterable,
+ Awaitable,
+ Callable,
+ Iterable,
+ Sequence,
+ Tuple,
+ Optional,
+)
from google.cloud.bigquery_connection_v1.types import connection
@@ -24,7 +31,7 @@ class ListConnectionsPager:
"""A pager for iterating through ``list_connections`` requests.
This class thinly wraps an initial
- :class:`~.connection.ListConnectionsResponse` object, and
+ :class:`google.cloud.bigquery_connection_v1.types.ListConnectionsResponse` object, and
provides an ``__iter__`` method to iterate through its
``connections`` field.
@@ -33,7 +40,7 @@ class ListConnectionsPager:
through the ``connections`` field on the
corresponding responses.
- All the usual :class:`~.connection.ListConnectionsResponse`
+ All the usual :class:`google.cloud.bigquery_connection_v1.types.ListConnectionsResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup.
"""
@@ -51,9 +58,9 @@ def __init__(
Args:
method (Callable): The method that was originally called, and
which instantiated this pager.
- request (:class:`~.connection.ListConnectionsRequest`):
+ request (google.cloud.bigquery_connection_v1.types.ListConnectionsRequest):
The initial request object.
- response (:class:`~.connection.ListConnectionsResponse`):
+ response (google.cloud.bigquery_connection_v1.types.ListConnectionsResponse):
The initial response object.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
@@ -86,7 +93,7 @@ class ListConnectionsAsyncPager:
"""A pager for iterating through ``list_connections`` requests.
This class thinly wraps an initial
- :class:`~.connection.ListConnectionsResponse` object, and
+ :class:`google.cloud.bigquery_connection_v1.types.ListConnectionsResponse` object, and
provides an ``__aiter__`` method to iterate through its
``connections`` field.
@@ -95,7 +102,7 @@ class ListConnectionsAsyncPager:
through the ``connections`` field on the
corresponding responses.
- All the usual :class:`~.connection.ListConnectionsResponse`
+ All the usual :class:`google.cloud.bigquery_connection_v1.types.ListConnectionsResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup.
"""
@@ -108,14 +115,14 @@ def __init__(
*,
metadata: Sequence[Tuple[str, str]] = ()
):
- """Instantiate the pager.
+ """Instantiates the pager.
Args:
method (Callable): The method that was originally called, and
which instantiated this pager.
- request (:class:`~.connection.ListConnectionsRequest`):
+ request (google.cloud.bigquery_connection_v1.types.ListConnectionsRequest):
The initial request object.
- response (:class:`~.connection.ListConnectionsResponse`):
+ response (google.cloud.bigquery_connection_v1.types.ListConnectionsResponse):
The initial response object.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
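
The pagers.py changes are documentation-only (fully qualified type references and a widened typing import), so iteration behaviour is unchanged. For orientation, a minimal sketch of how `ListConnectionsPager` is normally consumed follows; the parent path is a placeholder.

```python
# Minimal sketch, not part of the patch: the pager transparently walks
# ListConnectionsResponse pages and yields Connection messages.
from google.cloud.bigquery_connection_v1 import ConnectionServiceClient

client = ConnectionServiceClient()
pager = client.list_connections(parent="projects/my-project/locations/us")

for conn in pager:  # crosses page boundaries automatically
    print(conn.name, conn.friendly_name)
```
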
diff --git a/google/cloud/bigquery_connection_v1/services/connection_service/transports/__init__.py b/google/cloud/bigquery_connection_v1/services/connection_service/transports/__init__.py
index 3835c8f..86df1ca 100644
--- a/google/cloud/bigquery_connection_v1/services/connection_service/transports/__init__.py
+++ b/google/cloud/bigquery_connection_v1/services/connection_service/transports/__init__.py
@@ -1,5 +1,4 @@
# -*- coding: utf-8 -*-
-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,7 +13,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-
from collections import OrderedDict
from typing import Dict, Type
diff --git a/google/cloud/bigquery_connection_v1/services/connection_service/transports/base.py b/google/cloud/bigquery_connection_v1/services/connection_service/transports/base.py
index 58b35b9..6b9c304 100644
--- a/google/cloud/bigquery_connection_v1/services/connection_service/transports/base.py
+++ b/google/cloud/bigquery_connection_v1/services/connection_service/transports/base.py
@@ -1,5 +1,4 @@
# -*- coding: utf-8 -*-
-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,23 +13,24 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-
import abc
-import typing
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Union
+import packaging.version
import pkg_resources
-from google import auth # type: ignore
-from google.api_core import exceptions # type: ignore
+import google.auth # type: ignore
+import google.api_core # type: ignore
+from google.api_core import exceptions as core_exceptions # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
-from google.auth import credentials # type: ignore
+from google.auth import credentials as ga_credentials # type: ignore
+from google.oauth2 import service_account # type: ignore
from google.cloud.bigquery_connection_v1.types import connection
from google.cloud.bigquery_connection_v1.types import connection as gcbc_connection
-from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore
-from google.iam.v1 import policy_pb2 as policy # type: ignore
-from google.protobuf import empty_pb2 as empty # type: ignore
-
+from google.iam.v1 import iam_policy_pb2 # type: ignore
+from google.iam.v1 import policy_pb2 # type: ignore
+from google.protobuf import empty_pb2 # type: ignore
try:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
@@ -41,6 +41,15 @@
except pkg_resources.DistributionNotFound:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
+try:
+ # google.auth.__version__ was added in 1.26.0
+ _GOOGLE_AUTH_VERSION = google.auth.__version__
+except AttributeError:
+ try: # try pkg_resources if it is available
+ _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version
+ except pkg_resources.DistributionNotFound: # pragma: NO COVER
+ _GOOGLE_AUTH_VERSION = None
+
class ConnectionServiceTransport(abc.ABC):
"""Abstract transport class for ConnectionService."""
@@ -50,21 +59,25 @@ class ConnectionServiceTransport(abc.ABC):
"https://www.googleapis.com/auth/cloud-platform",
)
+ DEFAULT_HOST: str = "bigqueryconnection.googleapis.com"
+
def __init__(
self,
*,
- host: str = "bigqueryconnection.googleapis.com",
- credentials: credentials.Credentials = None,
- credentials_file: typing.Optional[str] = None,
- scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES,
- quota_project_id: typing.Optional[str] = None,
+ host: str = DEFAULT_HOST,
+ credentials: ga_credentials.Credentials = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ always_use_jwt_access: Optional[bool] = False,
**kwargs,
) -> None:
"""Instantiate the transport.
Args:
- host (Optional[str]): The hostname to connect to.
+ host (Optional[str]):
+ The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
@@ -73,42 +86,77 @@ def __init__(
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is mutually exclusive with credentials.
- scope (Optional[Sequence[str]]): A list of scopes.
+ scopes (Optional[Sequence[str]]): A list of scopes.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
- client_info (google.api_core.gapic_v1.client_info.ClientInfo):
- The client info used to send a user-agent string along with
- API requests. If ``None``, then default info will be used.
- Generally, you only need to set this if you're developing
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
your own client library.
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+ be used for service account credentials.
"""
# Save the hostname. Default to port 443 (HTTPS) if none is specified.
if ":" not in host:
host += ":443"
self._host = host
+ scopes_kwargs = self._get_scopes_kwargs(self._host, scopes)
+
+ # Save the scopes.
+ self._scopes = scopes or self.AUTH_SCOPES
+
# If no credentials are provided, then determine the appropriate
# defaults.
if credentials and credentials_file:
- raise exceptions.DuplicateCredentialArgs(
+ raise core_exceptions.DuplicateCredentialArgs(
"'credentials_file' and 'credentials' are mutually exclusive"
)
if credentials_file is not None:
- credentials, _ = auth.load_credentials_from_file(
- credentials_file, scopes=scopes, quota_project_id=quota_project_id
+ credentials, _ = google.auth.load_credentials_from_file(
+ credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
)
elif credentials is None:
- credentials, _ = auth.default(
- scopes=scopes, quota_project_id=quota_project_id
+ credentials, _ = google.auth.default(
+ **scopes_kwargs, quota_project_id=quota_project_id
)
+ # If the credentials is service account credentials, then always try to use self signed JWT.
+ if (
+ always_use_jwt_access
+ and isinstance(credentials, service_account.Credentials)
+ and hasattr(service_account.Credentials, "with_always_use_jwt_access")
+ ):
+ credentials = credentials.with_always_use_jwt_access(True)
+
# Save the credentials.
self._credentials = credentials
- # Lifted into its own function so it can be stubbed out during tests.
- self._prep_wrapped_messages(client_info)
+ # TODO(busunkim): This method is in the base transport
+ # to avoid duplicating code across the transport classes. These functions
+ # should be deleted once the minimum required versions of google-auth is increased.
+
+ # TODO: Remove this function once google-auth >= 1.25.0 is required
+ @classmethod
+ def _get_scopes_kwargs(
+ cls, host: str, scopes: Optional[Sequence[str]]
+ ) -> Dict[str, Optional[Sequence[str]]]:
+ """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version"""
+
+ scopes_kwargs = {}
+
+ if _GOOGLE_AUTH_VERSION and (
+ packaging.version.parse(_GOOGLE_AUTH_VERSION)
+ >= packaging.version.parse("1.25.0")
+ ):
+ scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES}
+ else:
+ scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES}
+
+ return scopes_kwargs
def _prep_wrapped_messages(self, client_info):
# Precompute the wrapped methods.
@@ -123,8 +171,10 @@ def _prep_wrapped_messages(self, client_info):
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
- exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
+ core_exceptions.DeadlineExceeded,
+ core_exceptions.ServiceUnavailable,
),
+ deadline=60.0,
),
default_timeout=60.0,
client_info=client_info,
@@ -136,8 +186,10 @@ def _prep_wrapped_messages(self, client_info):
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
- exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
+ core_exceptions.DeadlineExceeded,
+ core_exceptions.ServiceUnavailable,
),
+ deadline=60.0,
),
default_timeout=60.0,
client_info=client_info,
@@ -152,8 +204,10 @@ def _prep_wrapped_messages(self, client_info):
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
- exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
+ core_exceptions.DeadlineExceeded,
+ core_exceptions.ServiceUnavailable,
),
+ deadline=60.0,
),
default_timeout=60.0,
client_info=client_info,
@@ -174,31 +228,29 @@ def _prep_wrapped_messages(self, client_info):
@property
def create_connection(
self,
- ) -> typing.Callable[
+ ) -> Callable[
[gcbc_connection.CreateConnectionRequest],
- typing.Union[
- gcbc_connection.Connection, typing.Awaitable[gcbc_connection.Connection]
- ],
+ Union[gcbc_connection.Connection, Awaitable[gcbc_connection.Connection]],
]:
raise NotImplementedError()
@property
def get_connection(
self,
- ) -> typing.Callable[
+ ) -> Callable[
[connection.GetConnectionRequest],
- typing.Union[connection.Connection, typing.Awaitable[connection.Connection]],
+ Union[connection.Connection, Awaitable[connection.Connection]],
]:
raise NotImplementedError()
@property
def list_connections(
self,
- ) -> typing.Callable[
+ ) -> Callable[
[connection.ListConnectionsRequest],
- typing.Union[
+ Union[
connection.ListConnectionsResponse,
- typing.Awaitable[connection.ListConnectionsResponse],
+ Awaitable[connection.ListConnectionsResponse],
],
]:
raise NotImplementedError()
@@ -206,49 +258,47 @@ def list_connections(
@property
def update_connection(
self,
- ) -> typing.Callable[
+ ) -> Callable[
[gcbc_connection.UpdateConnectionRequest],
- typing.Union[
- gcbc_connection.Connection, typing.Awaitable[gcbc_connection.Connection]
- ],
+ Union[gcbc_connection.Connection, Awaitable[gcbc_connection.Connection]],
]:
raise NotImplementedError()
@property
def delete_connection(
self,
- ) -> typing.Callable[
+ ) -> Callable[
[connection.DeleteConnectionRequest],
- typing.Union[empty.Empty, typing.Awaitable[empty.Empty]],
+ Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]],
]:
raise NotImplementedError()
@property
def get_iam_policy(
self,
- ) -> typing.Callable[
- [iam_policy.GetIamPolicyRequest],
- typing.Union[policy.Policy, typing.Awaitable[policy.Policy]],
+ ) -> Callable[
+ [iam_policy_pb2.GetIamPolicyRequest],
+ Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]],
]:
raise NotImplementedError()
@property
def set_iam_policy(
self,
- ) -> typing.Callable[
- [iam_policy.SetIamPolicyRequest],
- typing.Union[policy.Policy, typing.Awaitable[policy.Policy]],
+ ) -> Callable[
+ [iam_policy_pb2.SetIamPolicyRequest],
+ Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]],
]:
raise NotImplementedError()
@property
def test_iam_permissions(
self,
- ) -> typing.Callable[
- [iam_policy.TestIamPermissionsRequest],
- typing.Union[
- iam_policy.TestIamPermissionsResponse,
- typing.Awaitable[iam_policy.TestIamPermissionsResponse],
+ ) -> Callable[
+ [iam_policy_pb2.TestIamPermissionsRequest],
+ Union[
+ iam_policy_pb2.TestIamPermissionsResponse,
+ Awaitable[iam_policy_pb2.TestIamPermissionsResponse],
],
]:
raise NotImplementedError()
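A minimal sketch (outside the patch) of the version-gated scopes handling that the new `_get_scopes_kwargs` helper implements above: with google-auth >= 1.25.0 the user-supplied `scopes` and the library's `default_scopes` are forwarded separately, while older versions only accept a single `scopes` kwarg. The `AUTH_SCOPES` tuple below is illustrative; the canonical values live on the transport class.

import packaging.version
from typing import Dict, Optional, Sequence

# Illustrative defaults (assumed); the transport defines the real AUTH_SCOPES.
AUTH_SCOPES = (
    "https://www.googleapis.com/auth/bigquery",
    "https://www.googleapis.com/auth/cloud-platform",
)

def scopes_kwargs(
    auth_version: str, scopes: Optional[Sequence[str]] = None
) -> Dict[str, Optional[Sequence[str]]]:
    # Mirrors the branch in _get_scopes_kwargs, keyed on the installed google-auth version.
    if packaging.version.parse(auth_version) >= packaging.version.parse("1.25.0"):
        return {"scopes": scopes, "default_scopes": AUTH_SCOPES}
    return {"scopes": scopes or AUTH_SCOPES}

print(scopes_kwargs("1.24.0"))  # single 'scopes' kwarg with defaults folded in
print(scopes_kwargs("1.27.0"))  # 'scopes' and 'default_scopes' kept separate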
diff --git a/google/cloud/bigquery_connection_v1/services/connection_service/transports/grpc.py b/google/cloud/bigquery_connection_v1/services/connection_service/transports/grpc.py
index d9a04e1..24d836b 100644
--- a/google/cloud/bigquery_connection_v1/services/connection_service/transports/grpc.py
+++ b/google/cloud/bigquery_connection_v1/services/connection_service/transports/grpc.py
@@ -1,5 +1,4 @@
# -*- coding: utf-8 -*-
-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,24 +13,22 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-
import warnings
-from typing import Callable, Dict, Optional, Sequence, Tuple
+from typing import Callable, Dict, Optional, Sequence, Tuple, Union
from google.api_core import grpc_helpers # type: ignore
from google.api_core import gapic_v1 # type: ignore
-from google import auth # type: ignore
-from google.auth import credentials # type: ignore
+import google.auth # type: ignore
+from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
import grpc # type: ignore
from google.cloud.bigquery_connection_v1.types import connection
from google.cloud.bigquery_connection_v1.types import connection as gcbc_connection
-from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore
-from google.iam.v1 import policy_pb2 as policy # type: ignore
-from google.protobuf import empty_pb2 as empty # type: ignore
-
+from google.iam.v1 import iam_policy_pb2 # type: ignore
+from google.iam.v1 import policy_pb2 # type: ignore
+from google.protobuf import empty_pb2 # type: ignore
from .base import ConnectionServiceTransport, DEFAULT_CLIENT_INFO
@@ -54,20 +51,22 @@ def __init__(
self,
*,
host: str = "bigqueryconnection.googleapis.com",
- credentials: credentials.Credentials = None,
+ credentials: ga_credentials.Credentials = None,
credentials_file: str = None,
scopes: Sequence[str] = None,
channel: grpc.Channel = None,
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
ssl_channel_credentials: grpc.ChannelCredentials = None,
+ client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the transport.
Args:
- host (Optional[str]): The hostname to connect to.
+ host (Optional[str]):
+ The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
@@ -91,6 +90,10 @@ def __init__(
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for grpc channel. It is ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ A callback to provide client certificate bytes and private key bytes,
+ both in PEM format. It is used to configure a mutual TLS channel. It is
+ ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
@@ -105,72 +108,61 @@ def __init__(
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
+ self._grpc_channel = None
self._ssl_channel_credentials = ssl_channel_credentials
+ self._stubs: Dict[str, Callable] = {}
+
+ if api_mtls_endpoint:
+ warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+ if client_cert_source:
+ warnings.warn("client_cert_source is deprecated", DeprecationWarning)
if channel:
- # Sanity check: Ensure that channel and credentials are not both
- # provided.
+ # Ignore credentials if a channel was passed.
credentials = False
-
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
self._ssl_channel_credentials = None
- elif api_mtls_endpoint:
- warnings.warn(
- "api_mtls_endpoint and client_cert_source are deprecated",
- DeprecationWarning,
- )
- host = (
- api_mtls_endpoint
- if ":" in api_mtls_endpoint
- else api_mtls_endpoint + ":443"
- )
+ else:
+ if api_mtls_endpoint:
+ host = api_mtls_endpoint
+
+ # Create SSL credentials with client_cert_source or application
+ # default SSL credentials.
+ if client_cert_source:
+ cert, key = client_cert_source()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+ else:
+ self._ssl_channel_credentials = SslCredentials().ssl_credentials
- if credentials is None:
- credentials, _ = auth.default(
- scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
- )
-
- # Create SSL credentials with client_cert_source or application
- # default SSL credentials.
- if client_cert_source:
- cert, key = client_cert_source()
- ssl_credentials = grpc.ssl_channel_credentials(
- certificate_chain=cert, private_key=key
- )
else:
- ssl_credentials = SslCredentials().ssl_credentials
+ if client_cert_source_for_mtls and not ssl_channel_credentials:
+ cert, key = client_cert_source_for_mtls()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
- # create a new channel. The provided one is ignored.
- self._grpc_channel = type(self).create_channel(
- host,
- credentials=credentials,
- credentials_file=credentials_file,
- ssl_credentials=ssl_credentials,
- scopes=scopes or self.AUTH_SCOPES,
- quota_project_id=quota_project_id,
- options=[
- ("grpc.max_send_message_length", -1),
- ("grpc.max_receive_message_length", -1),
- ],
- )
- self._ssl_channel_credentials = ssl_credentials
- else:
- host = host if ":" in host else host + ":443"
-
- if credentials is None:
- credentials, _ = auth.default(
- scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
- )
+ # The base transport sets the host, credentials and scopes
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes,
+ quota_project_id=quota_project_id,
+ client_info=client_info,
+ always_use_jwt_access=True,
+ )
- # create a new channel. The provided one is ignored.
+ if not self._grpc_channel:
self._grpc_channel = type(self).create_channel(
- host,
- credentials=credentials,
+ self._host,
+ credentials=self._credentials,
credentials_file=credentials_file,
- ssl_credentials=ssl_channel_credentials,
- scopes=scopes or self.AUTH_SCOPES,
+ scopes=self._scopes,
+ ssl_credentials=self._ssl_channel_credentials,
quota_project_id=quota_project_id,
options=[
("grpc.max_send_message_length", -1),
@@ -178,23 +170,14 @@ def __init__(
],
)
- self._stubs = {} # type: Dict[str, Callable]
-
- # Run the base constructor.
- super().__init__(
- host=host,
- credentials=credentials,
- credentials_file=credentials_file,
- scopes=scopes or self.AUTH_SCOPES,
- quota_project_id=quota_project_id,
- client_info=client_info,
- )
+ # Wrap messages. This must be done after self._grpc_channel exists
+ self._prep_wrapped_messages(client_info)
@classmethod
def create_channel(
cls,
host: str = "bigqueryconnection.googleapis.com",
- credentials: credentials.Credentials = None,
+ credentials: ga_credentials.Credentials = None,
credentials_file: str = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
@@ -202,7 +185,7 @@ def create_channel(
) -> grpc.Channel:
"""Create and return a gRPC channel object.
Args:
- address (Optional[str]): The host for the channel to use.
+ host (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
@@ -225,13 +208,15 @@ def create_channel(
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
- scopes = scopes or cls.AUTH_SCOPES
+
return grpc_helpers.create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
- scopes=scopes,
quota_project_id=quota_project_id,
+ default_scopes=cls.AUTH_SCOPES,
+ scopes=scopes,
+ default_host=cls.DEFAULT_HOST,
**kwargs,
)
@@ -356,7 +341,7 @@ def update_connection(
@property
def delete_connection(
self,
- ) -> Callable[[connection.DeleteConnectionRequest], empty.Empty]:
+ ) -> Callable[[connection.DeleteConnectionRequest], empty_pb2.Empty]:
r"""Return a callable for the delete connection method over gRPC.
Deletes connection and associated credential.
@@ -375,14 +360,14 @@ def delete_connection(
self._stubs["delete_connection"] = self.grpc_channel.unary_unary(
"/google.cloud.bigquery.connection.v1.ConnectionService/DeleteConnection",
request_serializer=connection.DeleteConnectionRequest.serialize,
- response_deserializer=empty.Empty.FromString,
+ response_deserializer=empty_pb2.Empty.FromString,
)
return self._stubs["delete_connection"]
@property
def get_iam_policy(
self,
- ) -> Callable[[iam_policy.GetIamPolicyRequest], policy.Policy]:
+ ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]:
r"""Return a callable for the get iam policy method over gRPC.
Gets the access control policy for a resource.
@@ -402,15 +387,15 @@ def get_iam_policy(
if "get_iam_policy" not in self._stubs:
self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary(
"/google.cloud.bigquery.connection.v1.ConnectionService/GetIamPolicy",
- request_serializer=iam_policy.GetIamPolicyRequest.SerializeToString,
- response_deserializer=policy.Policy.FromString,
+ request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString,
+ response_deserializer=policy_pb2.Policy.FromString,
)
return self._stubs["get_iam_policy"]
@property
def set_iam_policy(
self,
- ) -> Callable[[iam_policy.SetIamPolicyRequest], policy.Policy]:
+ ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]:
r"""Return a callable for the set iam policy method over gRPC.
Sets the access control policy on the specified resource.
@@ -432,8 +417,8 @@ def set_iam_policy(
if "set_iam_policy" not in self._stubs:
self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary(
"/google.cloud.bigquery.connection.v1.ConnectionService/SetIamPolicy",
- request_serializer=iam_policy.SetIamPolicyRequest.SerializeToString,
- response_deserializer=policy.Policy.FromString,
+ request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString,
+ response_deserializer=policy_pb2.Policy.FromString,
)
return self._stubs["set_iam_policy"]
@@ -441,7 +426,8 @@ def set_iam_policy(
def test_iam_permissions(
self,
) -> Callable[
- [iam_policy.TestIamPermissionsRequest], iam_policy.TestIamPermissionsResponse
+ [iam_policy_pb2.TestIamPermissionsRequest],
+ iam_policy_pb2.TestIamPermissionsResponse,
]:
r"""Return a callable for the test iam permissions method over gRPC.
@@ -467,8 +453,8 @@ def test_iam_permissions(
if "test_iam_permissions" not in self._stubs:
self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary(
"/google.cloud.bigquery.connection.v1.ConnectionService/TestIamPermissions",
- request_serializer=iam_policy.TestIamPermissionsRequest.SerializeToString,
- response_deserializer=iam_policy.TestIamPermissionsResponse.FromString,
+ request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString,
+ response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString,
)
return self._stubs["test_iam_permissions"]
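A hedged usage sketch of the reworked synchronous transport above: the base constructor now resolves host, credentials, and scopes first, and the channel is built afterwards from `self._host`, `self._credentials`, and `self._scopes`. Anonymous credentials are used here only so the example constructs without application-default credentials; no RPC is issued, and `transport._host` is an internal attribute inspected purely for illustration.

from google.auth.credentials import AnonymousCredentials
from google.cloud.bigquery_connection_v1.services.connection_service import (
    ConnectionServiceClient,
)

# Construction creates the gRPC channel but sends no requests.
client = ConnectionServiceClient(credentials=AnonymousCredentials())
print(client.transport._host)  # bigqueryconnection.googleapis.com:443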
diff --git a/google/cloud/bigquery_connection_v1/services/connection_service/transports/grpc_asyncio.py b/google/cloud/bigquery_connection_v1/services/connection_service/transports/grpc_asyncio.py
index 673a50b..ffbdc28 100644
--- a/google/cloud/bigquery_connection_v1/services/connection_service/transports/grpc_asyncio.py
+++ b/google/cloud/bigquery_connection_v1/services/connection_service/transports/grpc_asyncio.py
@@ -1,5 +1,4 @@
# -*- coding: utf-8 -*-
-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,25 +13,23 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-
import warnings
-from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
from google.api_core import gapic_v1 # type: ignore
from google.api_core import grpc_helpers_async # type: ignore
-from google import auth # type: ignore
-from google.auth import credentials # type: ignore
+from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
+import packaging.version
import grpc # type: ignore
from grpc.experimental import aio # type: ignore
from google.cloud.bigquery_connection_v1.types import connection
from google.cloud.bigquery_connection_v1.types import connection as gcbc_connection
-from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore
-from google.iam.v1 import policy_pb2 as policy # type: ignore
-from google.protobuf import empty_pb2 as empty # type: ignore
-
+from google.iam.v1 import iam_policy_pb2 # type: ignore
+from google.iam.v1 import policy_pb2 # type: ignore
+from google.protobuf import empty_pb2 # type: ignore
from .base import ConnectionServiceTransport, DEFAULT_CLIENT_INFO
from .grpc import ConnectionServiceGrpcTransport
@@ -57,7 +54,7 @@ class ConnectionServiceGrpcAsyncIOTransport(ConnectionServiceTransport):
def create_channel(
cls,
host: str = "bigqueryconnection.googleapis.com",
- credentials: credentials.Credentials = None,
+ credentials: ga_credentials.Credentials = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
@@ -65,7 +62,7 @@ def create_channel(
) -> aio.Channel:
"""Create and return a gRPC AsyncIO channel object.
Args:
- address (Optional[str]): The host for the channel to use.
+ host (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
@@ -84,13 +81,15 @@ def create_channel(
Returns:
aio.Channel: A gRPC AsyncIO channel object.
"""
- scopes = scopes or cls.AUTH_SCOPES
+
return grpc_helpers_async.create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
- scopes=scopes,
quota_project_id=quota_project_id,
+ default_scopes=cls.AUTH_SCOPES,
+ scopes=scopes,
+ default_host=cls.DEFAULT_HOST,
**kwargs,
)
@@ -98,20 +97,22 @@ def __init__(
self,
*,
host: str = "bigqueryconnection.googleapis.com",
- credentials: credentials.Credentials = None,
+ credentials: ga_credentials.Credentials = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
channel: aio.Channel = None,
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
ssl_channel_credentials: grpc.ChannelCredentials = None,
+ client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
quota_project_id=None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the transport.
Args:
- host (Optional[str]): The hostname to connect to.
+ host (Optional[str]):
+ The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
@@ -136,12 +137,16 @@ def __init__(
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for grpc channel. It is ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ A callback to provide client certificate bytes and private key bytes,
+ both in PEM format. It is used to configure a mutual TLS channel. It is
+ ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
- client_info (google.api_core.gapic_v1.client_info.ClientInfo):
- The client info used to send a user-agent string along with
- API requests. If ``None``, then default info will be used.
- Generally, you only need to set this if you're developing
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
your own client library.
Raises:
@@ -150,72 +155,60 @@ def __init__(
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
+ self._grpc_channel = None
self._ssl_channel_credentials = ssl_channel_credentials
+ self._stubs: Dict[str, Callable] = {}
+
+ if api_mtls_endpoint:
+ warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+ if client_cert_source:
+ warnings.warn("client_cert_source is deprecated", DeprecationWarning)
if channel:
- # Sanity check: Ensure that channel and credentials are not both
- # provided.
+ # Ignore credentials if a channel was passed.
credentials = False
-
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
self._ssl_channel_credentials = None
- elif api_mtls_endpoint:
- warnings.warn(
- "api_mtls_endpoint and client_cert_source are deprecated",
- DeprecationWarning,
- )
-
- host = (
- api_mtls_endpoint
- if ":" in api_mtls_endpoint
- else api_mtls_endpoint + ":443"
- )
+ else:
+ if api_mtls_endpoint:
+ host = api_mtls_endpoint
+
+ # Create SSL credentials with client_cert_source or application
+ # default SSL credentials.
+ if client_cert_source:
+ cert, key = client_cert_source()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+ else:
+ self._ssl_channel_credentials = SslCredentials().ssl_credentials
- if credentials is None:
- credentials, _ = auth.default(
- scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
- )
-
- # Create SSL credentials with client_cert_source or application
- # default SSL credentials.
- if client_cert_source:
- cert, key = client_cert_source()
- ssl_credentials = grpc.ssl_channel_credentials(
- certificate_chain=cert, private_key=key
- )
else:
- ssl_credentials = SslCredentials().ssl_credentials
-
- # create a new channel. The provided one is ignored.
- self._grpc_channel = type(self).create_channel(
- host,
- credentials=credentials,
- credentials_file=credentials_file,
- ssl_credentials=ssl_credentials,
- scopes=scopes or self.AUTH_SCOPES,
- quota_project_id=quota_project_id,
- options=[
- ("grpc.max_send_message_length", -1),
- ("grpc.max_receive_message_length", -1),
- ],
- )
- self._ssl_channel_credentials = ssl_credentials
- else:
- host = host if ":" in host else host + ":443"
+ if client_cert_source_for_mtls and not ssl_channel_credentials:
+ cert, key = client_cert_source_for_mtls()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
- if credentials is None:
- credentials, _ = auth.default(
- scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
- )
+ # The base transport sets the host, credentials and scopes
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes,
+ quota_project_id=quota_project_id,
+ client_info=client_info,
+ always_use_jwt_access=True,
+ )
- # create a new channel. The provided one is ignored.
+ if not self._grpc_channel:
self._grpc_channel = type(self).create_channel(
- host,
- credentials=credentials,
+ self._host,
+ credentials=self._credentials,
credentials_file=credentials_file,
- ssl_credentials=ssl_channel_credentials,
- scopes=scopes or self.AUTH_SCOPES,
+ scopes=self._scopes,
+ ssl_credentials=self._ssl_channel_credentials,
quota_project_id=quota_project_id,
options=[
("grpc.max_send_message_length", -1),
@@ -223,17 +216,8 @@ def __init__(
],
)
- # Run the base constructor.
- super().__init__(
- host=host,
- credentials=credentials,
- credentials_file=credentials_file,
- scopes=scopes or self.AUTH_SCOPES,
- quota_project_id=quota_project_id,
- client_info=client_info,
- )
-
- self._stubs = {}
+ # Wrap messages. This must be done after self._grpc_channel exists
+ self._prep_wrapped_messages(client_info)
@property
def grpc_channel(self) -> aio.Channel:
@@ -361,7 +345,7 @@ def update_connection(
@property
def delete_connection(
self,
- ) -> Callable[[connection.DeleteConnectionRequest], Awaitable[empty.Empty]]:
+ ) -> Callable[[connection.DeleteConnectionRequest], Awaitable[empty_pb2.Empty]]:
r"""Return a callable for the delete connection method over gRPC.
Deletes connection and associated credential.
@@ -380,14 +364,14 @@ def delete_connection(
self._stubs["delete_connection"] = self.grpc_channel.unary_unary(
"/google.cloud.bigquery.connection.v1.ConnectionService/DeleteConnection",
request_serializer=connection.DeleteConnectionRequest.serialize,
- response_deserializer=empty.Empty.FromString,
+ response_deserializer=empty_pb2.Empty.FromString,
)
return self._stubs["delete_connection"]
@property
def get_iam_policy(
self,
- ) -> Callable[[iam_policy.GetIamPolicyRequest], Awaitable[policy.Policy]]:
+ ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], Awaitable[policy_pb2.Policy]]:
r"""Return a callable for the get iam policy method over gRPC.
Gets the access control policy for a resource.
@@ -407,15 +391,15 @@ def get_iam_policy(
if "get_iam_policy" not in self._stubs:
self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary(
"/google.cloud.bigquery.connection.v1.ConnectionService/GetIamPolicy",
- request_serializer=iam_policy.GetIamPolicyRequest.SerializeToString,
- response_deserializer=policy.Policy.FromString,
+ request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString,
+ response_deserializer=policy_pb2.Policy.FromString,
)
return self._stubs["get_iam_policy"]
@property
def set_iam_policy(
self,
- ) -> Callable[[iam_policy.SetIamPolicyRequest], Awaitable[policy.Policy]]:
+ ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], Awaitable[policy_pb2.Policy]]:
r"""Return a callable for the set iam policy method over gRPC.
Sets the access control policy on the specified resource.
@@ -437,8 +421,8 @@ def set_iam_policy(
if "set_iam_policy" not in self._stubs:
self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary(
"/google.cloud.bigquery.connection.v1.ConnectionService/SetIamPolicy",
- request_serializer=iam_policy.SetIamPolicyRequest.SerializeToString,
- response_deserializer=policy.Policy.FromString,
+ request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString,
+ response_deserializer=policy_pb2.Policy.FromString,
)
return self._stubs["set_iam_policy"]
@@ -446,8 +430,8 @@ def set_iam_policy(
def test_iam_permissions(
self,
) -> Callable[
- [iam_policy.TestIamPermissionsRequest],
- Awaitable[iam_policy.TestIamPermissionsResponse],
+ [iam_policy_pb2.TestIamPermissionsRequest],
+ Awaitable[iam_policy_pb2.TestIamPermissionsResponse],
]:
r"""Return a callable for the test iam permissions method over gRPC.
@@ -473,8 +457,8 @@ def test_iam_permissions(
if "test_iam_permissions" not in self._stubs:
self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary(
"/google.cloud.bigquery.connection.v1.ConnectionService/TestIamPermissions",
- request_serializer=iam_policy.TestIamPermissionsRequest.SerializeToString,
- response_deserializer=iam_policy.TestIamPermissionsResponse.FromString,
+ request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString,
+ response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString,
)
return self._stubs["test_iam_permissions"]
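The asyncio transport follows the same constructor flow; a hedged sketch, again with anonymous credentials so nothing is sent over the wire:

import asyncio

from google.auth.credentials import AnonymousCredentials
from google.cloud.bigquery_connection_v1.services.connection_service import (
    ConnectionServiceAsyncClient,
)

async def main() -> None:
    # The base transport resolves credentials and scopes, then the aio channel
    # is created from them, mirroring the synchronous transport above.
    client = ConnectionServiceAsyncClient(credentials=AnonymousCredentials())
    print(client.transport._host)  # bigqueryconnection.googleapis.com:443

asyncio.run(main())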
diff --git a/google/cloud/bigquery_connection_v1/types/__init__.py b/google/cloud/bigquery_connection_v1/types/__init__.py
index fa8107a..cfaf47a 100644
--- a/google/cloud/bigquery_connection_v1/types/__init__.py
+++ b/google/cloud/bigquery_connection_v1/types/__init__.py
@@ -1,5 +1,4 @@
# -*- coding: utf-8 -*-
-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,31 +13,30 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-
from .connection import (
+ AwsCrossAccountRole,
+ AwsProperties,
+ CloudSqlCredential,
+ CloudSqlProperties,
+ Connection,
CreateConnectionRequest,
+ DeleteConnectionRequest,
GetConnectionRequest,
ListConnectionsRequest,
ListConnectionsResponse,
UpdateConnectionRequest,
- DeleteConnectionRequest,
- Connection,
- CloudSqlProperties,
- CloudSqlCredential,
- AwsProperties,
- AwsCrossAccountRole,
)
__all__ = (
+ "AwsCrossAccountRole",
+ "AwsProperties",
+ "CloudSqlCredential",
+ "CloudSqlProperties",
+ "Connection",
"CreateConnectionRequest",
+ "DeleteConnectionRequest",
"GetConnectionRequest",
"ListConnectionsRequest",
"ListConnectionsResponse",
"UpdateConnectionRequest",
- "DeleteConnectionRequest",
- "Connection",
- "CloudSqlProperties",
- "CloudSqlCredential",
- "AwsProperties",
- "AwsCrossAccountRole",
)
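The re-exported names above are reachable from the `types` subpackage; a small sketch with a placeholder resource name:

from google.cloud.bigquery_connection_v1 import types

request = types.GetConnectionRequest(
    name="projects/my-project/locations/US/connections/my-connection"
)
print(request.name)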
diff --git a/google/cloud/bigquery_connection_v1/types/connection.py b/google/cloud/bigquery_connection_v1/types/connection.py
index a964123..6e83042 100644
--- a/google/cloud/bigquery_connection_v1/types/connection.py
+++ b/google/cloud/bigquery_connection_v1/types/connection.py
@@ -1,5 +1,4 @@
# -*- coding: utf-8 -*-
-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,11 +13,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-
import proto # type: ignore
-
-from google.protobuf import field_mask_pb2 as field_mask # type: ignore
+from google.protobuf import field_mask_pb2 # type: ignore
__protobuf__ = proto.module(
@@ -50,14 +47,12 @@ class CreateConnectionRequest(proto.Message):
connection_id (str):
Optional. Connection id that should be
assigned to the created connection.
- connection (~.gcbc_connection.Connection):
+ connection (google.cloud.bigquery_connection_v1.types.Connection):
Required. Connection to create.
"""
- parent = proto.Field(proto.STRING, number=1)
-
- connection_id = proto.Field(proto.STRING, number=2)
-
+ parent = proto.Field(proto.STRING, number=1,)
+ connection_id = proto.Field(proto.STRING, number=2,)
connection = proto.Field(proto.MESSAGE, number=3, message="Connection",)
@@ -71,7 +66,7 @@ class GetConnectionRequest(proto.Message):
``projects/{project_id}/locations/{location_id}/connections/{connection_id}``
"""
- name = proto.Field(proto.STRING, number=1)
+ name = proto.Field(proto.STRING, number=1,)
class ListConnectionsRequest(proto.Message):
@@ -88,11 +83,9 @@ class ListConnectionsRequest(proto.Message):
Page token.
"""
- parent = proto.Field(proto.STRING, number=1)
-
- page_size = proto.Field(proto.INT32, number=4)
-
- page_token = proto.Field(proto.STRING, number=3)
+ parent = proto.Field(proto.STRING, number=1,)
+ page_size = proto.Field(proto.INT32, number=4,)
+ page_token = proto.Field(proto.STRING, number=3,)
class ListConnectionsResponse(proto.Message):
@@ -102,7 +95,7 @@ class ListConnectionsResponse(proto.Message):
Attributes:
next_page_token (str):
Next page token.
- connections (Sequence[~.gcbc_connection.Connection]):
+ connections (Sequence[google.cloud.bigquery_connection_v1.types.Connection]):
List of connections.
"""
@@ -110,8 +103,7 @@ class ListConnectionsResponse(proto.Message):
def raw_page(self):
return self
- next_page_token = proto.Field(proto.STRING, number=1)
-
+ next_page_token = proto.Field(proto.STRING, number=1,)
connections = proto.RepeatedField(proto.MESSAGE, number=2, message="Connection",)
@@ -123,31 +115,30 @@ class UpdateConnectionRequest(proto.Message):
name (str):
Required. Name of the connection to update, for example:
``projects/{project_id}/locations/{location_id}/connections/{connection_id}``
- connection (~.gcbc_connection.Connection):
+ connection (google.cloud.bigquery_connection_v1.types.Connection):
Required. Connection containing the updated
fields.
- update_mask (~.field_mask.FieldMask):
+ update_mask (google.protobuf.field_mask_pb2.FieldMask):
Required. Update mask for the connection
fields to be updated.
"""
- name = proto.Field(proto.STRING, number=1)
-
+ name = proto.Field(proto.STRING, number=1,)
connection = proto.Field(proto.MESSAGE, number=2, message="Connection",)
-
- update_mask = proto.Field(proto.MESSAGE, number=3, message=field_mask.FieldMask,)
+ update_mask = proto.Field(
+ proto.MESSAGE, number=3, message=field_mask_pb2.FieldMask,
+ )
class DeleteConnectionRequest(proto.Message):
r"""The request for [ConnectionService.DeleteConnectionRequest][].
-
Attributes:
name (str):
Required. Name of the deleted connection, for example:
``projects/{project_id}/locations/{location_id}/connections/{connection_id}``
"""
- name = proto.Field(proto.STRING, number=1)
+ name = proto.Field(proto.STRING, number=1,)
class Connection(proto.Message):
@@ -163,9 +154,9 @@ class Connection(proto.Message):
connection.
description (str):
User provided description.
- cloud_sql (~.gcbc_connection.CloudSqlProperties):
+ cloud_sql (google.cloud.bigquery_connection_v1.types.CloudSqlProperties):
Cloud SQL properties.
- aws (~.gcbc_connection.AwsProperties):
+ aws (google.cloud.bigquery_connection_v1.types.AwsProperties):
Amazon Web Services (AWS) properties.
creation_time (int):
Output only. The creation timestamp of the
@@ -178,39 +169,31 @@ class Connection(proto.Message):
configured for this connection.
"""
- name = proto.Field(proto.STRING, number=1)
-
- friendly_name = proto.Field(proto.STRING, number=2)
-
- description = proto.Field(proto.STRING, number=3)
-
+ name = proto.Field(proto.STRING, number=1,)
+ friendly_name = proto.Field(proto.STRING, number=2,)
+ description = proto.Field(proto.STRING, number=3,)
cloud_sql = proto.Field(
proto.MESSAGE, number=4, oneof="properties", message="CloudSqlProperties",
)
-
aws = proto.Field(
proto.MESSAGE, number=8, oneof="properties", message="AwsProperties",
)
-
- creation_time = proto.Field(proto.INT64, number=5)
-
- last_modified_time = proto.Field(proto.INT64, number=6)
-
- has_credential = proto.Field(proto.BOOL, number=7)
+ creation_time = proto.Field(proto.INT64, number=5,)
+ last_modified_time = proto.Field(proto.INT64, number=6,)
+ has_credential = proto.Field(proto.BOOL, number=7,)
class CloudSqlProperties(proto.Message):
r"""Connection properties specific to the Cloud SQL.
-
Attributes:
instance_id (str):
Cloud SQL instance ID in the form
``project:location:instance``.
database (str):
Database name.
- type_ (~.gcbc_connection.CloudSqlProperties.DatabaseType):
+ type_ (google.cloud.bigquery_connection_v1.types.CloudSqlProperties.DatabaseType):
Type of the Cloud SQL database.
- credential (~.gcbc_connection.CloudSqlCredential):
+ credential (google.cloud.bigquery_connection_v1.types.CloudSqlCredential):
Input only. Cloud SQL credential.
"""
@@ -220,18 +203,14 @@ class DatabaseType(proto.Enum):
POSTGRES = 1
MYSQL = 2
- instance_id = proto.Field(proto.STRING, number=1)
-
- database = proto.Field(proto.STRING, number=2)
-
+ instance_id = proto.Field(proto.STRING, number=1,)
+ database = proto.Field(proto.STRING, number=2,)
type_ = proto.Field(proto.ENUM, number=3, enum=DatabaseType,)
-
credential = proto.Field(proto.MESSAGE, number=4, message="CloudSqlCredential",)
class CloudSqlCredential(proto.Message):
r"""Credential info for the Cloud SQL.
-
Attributes:
username (str):
The username for the credential.
@@ -239,16 +218,14 @@ class CloudSqlCredential(proto.Message):
The password for the credential.
"""
- username = proto.Field(proto.STRING, number=1)
-
- password = proto.Field(proto.STRING, number=2)
+ username = proto.Field(proto.STRING, number=1,)
+ password = proto.Field(proto.STRING, number=2,)
class AwsProperties(proto.Message):
r"""Connection properties specific to Amazon Web Services (AWS).
-
Attributes:
- cross_account_role (~.gcbc_connection.AwsCrossAccountRole):
+ cross_account_role (google.cloud.bigquery_connection_v1.types.AwsCrossAccountRole):
Authentication using Google owned AWS IAM
user's access key to assume into customer's AWS
IAM Role.
@@ -281,11 +258,9 @@ class AwsCrossAccountRole(proto.Message):
https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_create_for-user_externalid.html
"""
- iam_role_id = proto.Field(proto.STRING, number=1)
-
- iam_user_id = proto.Field(proto.STRING, number=2)
-
- external_id = proto.Field(proto.STRING, number=3)
+ iam_role_id = proto.Field(proto.STRING, number=1,)
+ iam_user_id = proto.Field(proto.STRING, number=2,)
+ external_id = proto.Field(proto.STRING, number=3,)
__all__ = tuple(sorted(__protobuf__.manifest))
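A short, hedged example of building the messages defined above with proto-plus keyword arguments; the project, instance, and credential values are placeholders:

from google.cloud.bigquery_connection_v1.types import connection as connection_types

conn = connection_types.Connection(
    friendly_name="reporting",
    description="Connection used by the analytics team",
    cloud_sql=connection_types.CloudSqlProperties(
        instance_id="my-project:us-central1:my-instance",
        database="analytics",
        type_=connection_types.CloudSqlProperties.DatabaseType.POSTGRES,
        credential=connection_types.CloudSqlCredential(
            username="reader", password="example-password",
        ),
    ),
)
print(conn.cloud_sql.database)  # analytics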
diff --git a/noxfile.py b/noxfile.py
index a57e24b..94ee6a8 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -18,6 +18,7 @@
from __future__ import absolute_import
import os
+import pathlib
import shutil
import nox
@@ -30,6 +31,22 @@
SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"]
UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"]
+CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()
+
+# 'docfx' is excluded since it only needs to run in 'docs-presubmit'
+nox.options.sessions = [
+ "unit",
+ "system",
+ "cover",
+ "lint",
+ "lint_setup_py",
+ "blacken",
+ "docs",
+]
+
+# Error if a python version is missing
+nox.options.error_on_missing_interpreters = True
+
@nox.session(python=DEFAULT_PYTHON_VERSION)
def lint(session):
@@ -45,16 +62,9 @@ def lint(session):
session.run("flake8", "google", "tests")
-@nox.session(python="3.6")
+@nox.session(python=DEFAULT_PYTHON_VERSION)
def blacken(session):
- """Run black.
-
- Format code to uniform standard.
-
- This currently uses Python 3.6 due to the automated Kokoro run of synthtool.
- That run uses an image that doesn't have 3.6 installed. Before updating this
- check the state of the `gcp_ubuntu_config` we use for that Kokoro run.
- """
+ """Run black. Format code to uniform standard."""
session.install(BLACK_VERSION)
session.run(
"black", *BLACK_PATHS,
@@ -70,17 +80,21 @@ def lint_setup_py(session):
def default(session):
# Install all test dependencies, then install this package in-place.
- session.install("asyncmock", "pytest-asyncio")
- session.install(
- "mock", "pytest", "pytest-cov",
+ constraints_path = str(
+ CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt"
)
- session.install("-e", ".")
+ session.install("asyncmock", "pytest-asyncio", "-c", constraints_path)
+
+ session.install("mock", "pytest", "pytest-cov", "-c", constraints_path)
+
+ session.install("-e", ".", "-c", constraints_path)
# Run py.test against the unit tests.
session.run(
"py.test",
"--quiet",
+ f"--junitxml=unit_{session.python}_sponge_log.xml",
"--cov=google/cloud",
"--cov=tests/unit",
"--cov-append",
@@ -101,15 +115,18 @@ def unit(session):
@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS)
def system(session):
"""Run the system test suite."""
+ constraints_path = str(
+ CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt"
+ )
system_test_path = os.path.join("tests", "system.py")
system_test_folder_path = os.path.join("tests", "system")
# Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true.
if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false":
session.skip("RUN_SYSTEM_TESTS is set to false, skipping")
- # Sanity check: Only run tests if the environment variable is set.
- if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""):
- session.skip("Credentials must be set via environment variable")
+ # Install pyopenssl for mTLS testing.
+ if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true":
+ session.install("pyopenssl")
system_test_exists = os.path.exists(system_test_path)
system_test_folder_exists = os.path.exists(system_test_folder_path)
@@ -122,16 +139,26 @@ def system(session):
# Install all test dependencies, then install this package into the
# virtualenv's dist-packages.
- session.install(
- "mock", "pytest", "google-cloud-testutils",
- )
- session.install("-e", ".")
+ session.install("mock", "pytest", "google-cloud-testutils", "-c", constraints_path)
+ session.install("-e", ".", "-c", constraints_path)
# Run py.test against the system tests.
if system_test_exists:
- session.run("py.test", "--quiet", system_test_path, *session.posargs)
+ session.run(
+ "py.test",
+ "--quiet",
+ f"--junitxml=system_{session.python}_sponge_log.xml",
+ system_test_path,
+ *session.posargs,
+ )
if system_test_folder_exists:
- session.run("py.test", "--quiet", system_test_folder_path, *session.posargs)
+ session.run(
+ "py.test",
+ "--quiet",
+ f"--junitxml=system_{session.python}_sponge_log.xml",
+ system_test_folder_path,
+ *session.posargs,
+ )
@nox.session(python=DEFAULT_PYTHON_VERSION)
@@ -152,7 +179,7 @@ def docs(session):
"""Build the docs for this library."""
session.install("-e", ".")
- session.install("sphinx", "alabaster", "recommonmark")
+ session.install("sphinx==4.0.1", "alabaster", "recommonmark")
shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True)
session.run(
@@ -174,9 +201,9 @@ def docfx(session):
"""Build the docfx yaml files for this library."""
session.install("-e", ".")
- # sphinx-docfx-yaml supports up to sphinx version 1.5.5.
- # https://github.com/docascode/sphinx-docfx-yaml/issues/97
- session.install("sphinx==1.5.5", "alabaster", "recommonmark", "sphinx-docfx-yaml")
+ session.install(
+ "sphinx==4.0.1", "alabaster", "recommonmark", "gcp-sphinx-docfx-yaml"
+ )
shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True)
session.run(
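A condensed sketch of the session pattern the noxfile now uses: dependencies are pinned against the per-interpreter constraints file and pytest emits a `*_sponge_log.xml` JUnit report. The session name below is illustrative.

import os
import pathlib

import nox

CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()

@nox.session(python=["3.6", "3.7", "3.8", "3.9"])
def unit_sketch(session):
    constraints_path = str(
        CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt"
    )
    session.install("mock", "pytest", "pytest-cov", "-c", constraints_path)
    session.install("-e", ".", "-c", constraints_path)
    session.run(
        "py.test",
        "--quiet",
        f"--junitxml=unit_{session.python}_sponge_log.xml",
        os.path.join("tests", "unit"),
        *session.posargs,
    )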
diff --git a/synth.py b/owlbot.py
similarity index 72%
rename from synth.py
rename to owlbot.py
index d773546..532c73a 100644
--- a/synth.py
+++ b/owlbot.py
@@ -19,19 +19,14 @@
import synthtool.gcp as gcp
from synthtool.languages import python
-gapic = gcp.GAPICBazel()
common = gcp.CommonTemplates()
-# ----------------------------------------------------------------------------
-# Generate access approval GAPIC layer
-# ----------------------------------------------------------------------------
-library = gapic.py_library(
- service="bigquery/connection",
- version="v1",
- bazel_target=f"//google/cloud/bigquery/connection/v1:bigquery-connection-v1-py"
-)
+default_version = "v1"
+
+for library in s.get_staging_dirs(default_version):
+ s.move(library, excludes=["setup.py", "README.rst", "docs/index.rst", "*.tar.gz"])
-s.move(library, excludes=["setup.py", "README.rst", "docs/index.rst"])
+s.remove_staging_dirs()
# ----------------------------------------------------------------------------
# Add templated files
diff --git a/renovate.json b/renovate.json
index 4fa9493..c048955 100644
--- a/renovate.json
+++ b/renovate.json
@@ -1,5 +1,9 @@
{
"extends": [
"config:base", ":preserveSemverRanges"
- ]
+ ],
+ "ignorePaths": [".pre-commit-config.yaml"],
+ "pip_requirements": {
+ "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"]
+ }
}
diff --git a/scripts/fixup_bigquery_connection_v1_keywords.py b/scripts/fixup_bigquery_connection_v1_keywords.py
index 64ee3d3..9276ace 100644
--- a/scripts/fixup_bigquery_connection_v1_keywords.py
+++ b/scripts/fixup_bigquery_connection_v1_keywords.py
@@ -1,6 +1,5 @@
#! /usr/bin/env python3
# -*- coding: utf-8 -*-
-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -15,7 +14,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-
import argparse
import os
import libcst as cst
@@ -41,15 +39,14 @@ def partition(
class bigquery_connectionCallTransformer(cst.CSTTransformer):
CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata')
METHOD_TO_PARAMS: Dict[str, Tuple[str]] = {
- 'create_connection': ('parent', 'connection', 'connection_id', ),
- 'delete_connection': ('name', ),
- 'get_connection': ('name', ),
- 'get_iam_policy': ('resource', 'options', ),
- 'list_connections': ('parent', 'page_size', 'page_token', ),
- 'set_iam_policy': ('resource', 'policy', ),
- 'test_iam_permissions': ('resource', 'permissions', ),
- 'update_connection': ('name', 'connection', 'update_mask', ),
-
+ 'create_connection': ('parent', 'connection', 'connection_id', ),
+ 'delete_connection': ('name', ),
+ 'get_connection': ('name', ),
+ 'get_iam_policy': ('resource', 'options', ),
+ 'list_connections': ('parent', 'page_size', 'page_token', ),
+ 'set_iam_policy': ('resource', 'policy', ),
+ 'test_iam_permissions': ('resource', 'permissions', ),
+ 'update_connection': ('name', 'connection', 'update_mask', ),
}
def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode:
@@ -80,7 +77,7 @@ def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode:
value=cst.Dict([
cst.DictElement(
cst.SimpleString("'{}'".format(name)),
- cst.Element(value=arg.value)
+cst.Element(value=arg.value)
)
# Note: the args + kwargs looks silly, but keep in mind that
# the control parameters had to be stripped out, and that
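The changes to the fixup script above are cosmetic (header and table reflow). For context, a hedged illustration of the rewrite the script performs on calling code, with placeholder resource names:

from google.cloud.bigquery_connection_v1 import types

parent = "projects/my-project/locations/US"  # placeholder
connection = types.Connection(friendly_name="reporting")

# The transformer turns the old positional call
#     client.create_connection(parent, connection, "my-connection")
# into a single keyword `request` dict, per METHOD_TO_PARAMS:
request = {
    "parent": parent,
    "connection": connection,
    "connection_id": "my-connection",
}
# client.create_connection(request=request)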
diff --git a/setup.py b/setup.py
index 29523ec..f72d82c 100644
--- a/setup.py
+++ b/setup.py
@@ -19,7 +19,7 @@
import os
import setuptools # type: ignore
-version = "1.0.1"
+version = "1.1.0"
package_root = os.path.abspath(os.path.dirname(__file__))
@@ -35,20 +35,23 @@
author_email="googleapis-packages@google.com",
license="Apache 2.0",
url="https://github.com/googleapis/python-bigquery-connection",
- packages=setuptools.PEP420PackageFinder.find(),
+ packages=[
+ package
+ for package in setuptools.PEP420PackageFinder.find()
+ if package.startswith("google")
+ ],
namespace_packages=("google", "google.cloud"),
platforms="Posix; MacOS X; Windows",
include_package_data=True,
install_requires=(
- "google-api-core >= 1.21.0, < 2.0.0dev",
+ "google-api-core >= 1.22.2, < 2.0.0dev",
"proto-plus >= 1.4.0",
- "grpc-google-iam-v1",
- "libcst >= 0.2.5",
+ "packaging >= 14.3",
+ "grpc-google-iam-v1 >= 0.12.3, < 0.13.0dev",
),
python_requires=">=3.6",
- scripts=["scripts/fixup_keywords.py"],
classifiers=[
- "Development Status :: 4 - Beta",
+ "Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3.6",
diff --git a/synth.metadata b/synth.metadata
deleted file mode 100644
index 4e83185..0000000
--- a/synth.metadata
+++ /dev/null
@@ -1,131 +0,0 @@
-{
- "sources": [
- {
- "git": {
- "name": ".",
- "remote": "https://github.com/googleapis/python-bigquery-connection.git",
- "sha": "f3ce3aa826173bf61b3b79803d0231c27f89e6fa"
- }
- },
- {
- "git": {
- "name": "googleapis",
- "remote": "https://github.com/googleapis/googleapis.git",
- "sha": "dd372aa22ded7a8ba6f0e03a80e06358a3fa0907",
- "internalRef": "347055288"
- }
- },
- {
- "git": {
- "name": "synthtool",
- "remote": "https://github.com/googleapis/synthtool.git",
- "sha": "f94318521f63085b9ccb43d42af89f153fb39f15"
- }
- },
- {
- "git": {
- "name": "synthtool",
- "remote": "https://github.com/googleapis/synthtool.git",
- "sha": "f94318521f63085b9ccb43d42af89f153fb39f15"
- }
- }
- ],
- "destinations": [
- {
- "client": {
- "source": "googleapis",
- "apiName": "bigquery/connection",
- "apiVersion": "v1",
- "language": "python",
- "generator": "bazel"
- }
- }
- ],
- "generatedFiles": [
- ".flake8",
- ".github/CONTRIBUTING.md",
- ".github/ISSUE_TEMPLATE/bug_report.md",
- ".github/ISSUE_TEMPLATE/feature_request.md",
- ".github/ISSUE_TEMPLATE/support_request.md",
- ".github/PULL_REQUEST_TEMPLATE.md",
- ".github/release-please.yml",
- ".github/snippet-bot.yml",
- ".gitignore",
- ".kokoro/build.sh",
- ".kokoro/continuous/common.cfg",
- ".kokoro/continuous/continuous.cfg",
- ".kokoro/docker/docs/Dockerfile",
- ".kokoro/docker/docs/fetch_gpg_keys.sh",
- ".kokoro/docs/common.cfg",
- ".kokoro/docs/docs-presubmit.cfg",
- ".kokoro/docs/docs.cfg",
- ".kokoro/populate-secrets.sh",
- ".kokoro/presubmit/common.cfg",
- ".kokoro/presubmit/presubmit.cfg",
- ".kokoro/publish-docs.sh",
- ".kokoro/release.sh",
- ".kokoro/release/common.cfg",
- ".kokoro/release/release.cfg",
- ".kokoro/samples/lint/common.cfg",
- ".kokoro/samples/lint/continuous.cfg",
- ".kokoro/samples/lint/periodic.cfg",
- ".kokoro/samples/lint/presubmit.cfg",
- ".kokoro/samples/python3.6/common.cfg",
- ".kokoro/samples/python3.6/continuous.cfg",
- ".kokoro/samples/python3.6/periodic.cfg",
- ".kokoro/samples/python3.6/presubmit.cfg",
- ".kokoro/samples/python3.7/common.cfg",
- ".kokoro/samples/python3.7/continuous.cfg",
- ".kokoro/samples/python3.7/periodic.cfg",
- ".kokoro/samples/python3.7/presubmit.cfg",
- ".kokoro/samples/python3.8/common.cfg",
- ".kokoro/samples/python3.8/continuous.cfg",
- ".kokoro/samples/python3.8/periodic.cfg",
- ".kokoro/samples/python3.8/presubmit.cfg",
- ".kokoro/test-samples.sh",
- ".kokoro/trampoline.sh",
- ".kokoro/trampoline_v2.sh",
- ".pre-commit-config.yaml",
- ".trampolinerc",
- "CODE_OF_CONDUCT.md",
- "CONTRIBUTING.rst",
- "LICENSE",
- "MANIFEST.in",
- "docs/_static/custom.css",
- "docs/_templates/layout.html",
- "docs/bigquery_connection_v1/services.rst",
- "docs/bigquery_connection_v1/types.rst",
- "docs/conf.py",
- "docs/multiprocessing.rst",
- "google/cloud/bigquery_connection/__init__.py",
- "google/cloud/bigquery_connection/py.typed",
- "google/cloud/bigquery_connection_v1/__init__.py",
- "google/cloud/bigquery_connection_v1/py.typed",
- "google/cloud/bigquery_connection_v1/services/__init__.py",
- "google/cloud/bigquery_connection_v1/services/connection_service/__init__.py",
- "google/cloud/bigquery_connection_v1/services/connection_service/async_client.py",
- "google/cloud/bigquery_connection_v1/services/connection_service/client.py",
- "google/cloud/bigquery_connection_v1/services/connection_service/pagers.py",
- "google/cloud/bigquery_connection_v1/services/connection_service/transports/__init__.py",
- "google/cloud/bigquery_connection_v1/services/connection_service/transports/base.py",
- "google/cloud/bigquery_connection_v1/services/connection_service/transports/grpc.py",
- "google/cloud/bigquery_connection_v1/services/connection_service/transports/grpc_asyncio.py",
- "google/cloud/bigquery_connection_v1/types/__init__.py",
- "google/cloud/bigquery_connection_v1/types/connection.py",
- "mypy.ini",
- "noxfile.py",
- "renovate.json",
- "scripts/decrypt-secrets.sh",
- "scripts/fixup_bigquery_connection_v1_keywords.py",
- "scripts/readme-gen/readme_gen.py",
- "scripts/readme-gen/templates/README.tmpl.rst",
- "scripts/readme-gen/templates/auth.tmpl.rst",
- "scripts/readme-gen/templates/auth_api_key.tmpl.rst",
- "scripts/readme-gen/templates/install_deps.tmpl.rst",
- "scripts/readme-gen/templates/install_portaudio.tmpl.rst",
- "setup.cfg",
- "testing/.gitignore",
- "tests/unit/gapic/bigquery_connection_v1/__init__.py",
- "tests/unit/gapic/bigquery_connection_v1/test_connection_service.py"
- ]
-}
\ No newline at end of file
diff --git a/testing/constraints-3.6.txt b/testing/constraints-3.6.txt
index 2e33cbc..fbed252 100644
--- a/testing/constraints-3.6.txt
+++ b/testing/constraints-3.6.txt
@@ -5,7 +5,7 @@
#
# e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev",
# Then this file should have foo==1.14.0
-google-api-core==1.21.0
+google-api-core==1.26.0
proto-plus==1.4.0
-grpc-google-iam-v1==0.9.0
-libcst==0.2.5
\ No newline at end of file
+grpc-google-iam-v1==0.12.3
+packaging==14.3
diff --git a/tests/__init__.py b/tests/__init__.py
new file mode 100644
index 0000000..4de6597
--- /dev/null
+++ b/tests/__init__.py
@@ -0,0 +1,15 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py
new file mode 100644
index 0000000..4de6597
--- /dev/null
+++ b/tests/unit/__init__.py
@@ -0,0 +1,15 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
diff --git a/tests/unit/gapic/__init__.py b/tests/unit/gapic/__init__.py
new file mode 100644
index 0000000..4de6597
--- /dev/null
+++ b/tests/unit/gapic/__init__.py
@@ -0,0 +1,15 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
diff --git a/tests/unit/gapic/bigquery_connection_v1/__init__.py b/tests/unit/gapic/bigquery_connection_v1/__init__.py
index 8b13789..4de6597 100644
--- a/tests/unit/gapic/bigquery_connection_v1/__init__.py
+++ b/tests/unit/gapic/bigquery_connection_v1/__init__.py
@@ -1 +1,15 @@
-
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
diff --git a/tests/unit/gapic/bigquery_connection_v1/test_connection_service.py b/tests/unit/gapic/bigquery_connection_v1/test_connection_service.py
index 846fd59..3390947 100644
--- a/tests/unit/gapic/bigquery_connection_v1/test_connection_service.py
+++ b/tests/unit/gapic/bigquery_connection_v1/test_connection_service.py
@@ -1,5 +1,4 @@
# -*- coding: utf-8 -*-
-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,9 +13,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-
import os
import mock
+import packaging.version
import grpc
from grpc.experimental import aio
@@ -24,13 +23,13 @@
import pytest
from proto.marshal.rules.dates import DurationRule, TimestampRule
-from google import auth
+
from google.api_core import client_options
-from google.api_core import exceptions
+from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import grpc_helpers
from google.api_core import grpc_helpers_async
-from google.auth import credentials
+from google.auth import credentials as ga_credentials
from google.auth.exceptions import MutualTLSChannelError
from google.cloud.bigquery_connection_v1.services.connection_service import (
ConnectionServiceAsyncClient,
@@ -40,14 +39,32 @@
)
from google.cloud.bigquery_connection_v1.services.connection_service import pagers
from google.cloud.bigquery_connection_v1.services.connection_service import transports
+from google.cloud.bigquery_connection_v1.services.connection_service.transports.base import (
+ _GOOGLE_AUTH_VERSION,
+)
from google.cloud.bigquery_connection_v1.types import connection
from google.cloud.bigquery_connection_v1.types import connection as gcbc_connection
-from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore
-from google.iam.v1 import options_pb2 as options # type: ignore
-from google.iam.v1 import policy_pb2 as policy # type: ignore
+from google.iam.v1 import iam_policy_pb2 # type: ignore
+from google.iam.v1 import options_pb2 # type: ignore
+from google.iam.v1 import policy_pb2 # type: ignore
from google.oauth2 import service_account
-from google.protobuf import field_mask_pb2 as field_mask # type: ignore
-from google.type import expr_pb2 as expr # type: ignore
+from google.protobuf import field_mask_pb2 # type: ignore
+from google.type import expr_pb2 # type: ignore
+import google.auth
+
+
+# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively
+# through google-api-core:
+# - Delete the auth "less than" test cases
+# - Delete these pytest markers (Make the "greater than or equal to" tests the default).
+requires_google_auth_lt_1_25_0 = pytest.mark.skipif(
+ packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"),
+ reason="This test requires google-auth < 1.25.0",
+)
+requires_google_auth_gte_1_25_0 = pytest.mark.skipif(
+ packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"),
+ reason="This test requires google-auth >= 1.25.0",
+)
def client_cert_source_callback():
@@ -96,26 +113,60 @@ def test__get_default_mtls_endpoint():
@pytest.mark.parametrize(
- "client_class", [ConnectionServiceClient, ConnectionServiceAsyncClient]
+ "client_class", [ConnectionServiceClient, ConnectionServiceAsyncClient,]
+)
+def test_connection_service_client_from_service_account_info(client_class):
+ creds = ga_credentials.AnonymousCredentials()
+ with mock.patch.object(
+ service_account.Credentials, "from_service_account_info"
+ ) as factory:
+ factory.return_value = creds
+ info = {"valid": True}
+ client = client_class.from_service_account_info(info)
+ assert client.transport._credentials == creds
+ assert isinstance(client, client_class)
+
+ assert client.transport._host == "bigqueryconnection.googleapis.com:443"
+
+
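# Usage sketch for the constructor covered above (hypothetical key path; a real
# service-account JSON key is assumed in practice).
def _sketch_client_from_service_account(path="service-account.json"):
    # from_service_account_json remains available as a backwards-compatible
    # alias, which the updated file-based test below also exercises.
    return ConnectionServiceClient.from_service_account_file(path)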
+@pytest.mark.parametrize(
+ "client_class", [ConnectionServiceClient, ConnectionServiceAsyncClient,]
+)
+def test_connection_service_client_service_account_always_use_jwt(client_class):
+ with mock.patch.object(
+ service_account.Credentials, "with_always_use_jwt_access", create=True
+ ) as use_jwt:
+ creds = service_account.Credentials(None, None, None)
+ client = client_class(credentials=creds)
+ use_jwt.assert_called_with(True)
+
+
+@pytest.mark.parametrize(
+ "client_class", [ConnectionServiceClient, ConnectionServiceAsyncClient,]
)
def test_connection_service_client_from_service_account_file(client_class):
- creds = credentials.AnonymousCredentials()
+ creds = ga_credentials.AnonymousCredentials()
with mock.patch.object(
service_account.Credentials, "from_service_account_file"
) as factory:
factory.return_value = creds
client = client_class.from_service_account_file("dummy/file/path.json")
assert client.transport._credentials == creds
+ assert isinstance(client, client_class)
client = client_class.from_service_account_json("dummy/file/path.json")
assert client.transport._credentials == creds
+ assert isinstance(client, client_class)
assert client.transport._host == "bigqueryconnection.googleapis.com:443"
def test_connection_service_client_get_transport_class():
transport = ConnectionServiceClient.get_transport_class()
- assert transport == transports.ConnectionServiceGrpcTransport
+ available_transports = [
+ transports.ConnectionServiceGrpcTransport,
+ ]
+ assert transport in available_transports
transport = ConnectionServiceClient.get_transport_class("grpc")
assert transport == transports.ConnectionServiceGrpcTransport
@@ -147,7 +198,7 @@ def test_connection_service_client_client_options(
):
# Check that if channel is provided we won't create a new one.
with mock.patch.object(ConnectionServiceClient, "get_transport_class") as gtc:
- transport = transport_class(credentials=credentials.AnonymousCredentials())
+ transport = transport_class(credentials=ga_credentials.AnonymousCredentials())
client = client_class(transport=transport)
gtc.assert_not_called()
@@ -166,7 +217,7 @@ def test_connection_service_client_client_options(
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -182,7 +233,7 @@ def test_connection_service_client_client_options(
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -198,7 +249,7 @@ def test_connection_service_client_client_options(
credentials_file=None,
host=client.DEFAULT_MTLS_ENDPOINT,
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -226,7 +277,7 @@ def test_connection_service_client_client_options(
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id="octopus",
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -287,29 +338,25 @@ def test_connection_service_client_mtls_env_auto(
client_cert_source=client_cert_source_callback
)
with mock.patch.object(transport_class, "__init__") as patched:
- ssl_channel_creds = mock.Mock()
- with mock.patch(
- "grpc.ssl_channel_credentials", return_value=ssl_channel_creds
- ):
- patched.return_value = None
- client = client_class(client_options=options)
+ patched.return_value = None
+ client = client_class(client_options=options)
- if use_client_cert_env == "false":
- expected_ssl_channel_creds = None
- expected_host = client.DEFAULT_ENDPOINT
- else:
- expected_ssl_channel_creds = ssl_channel_creds
- expected_host = client.DEFAULT_MTLS_ENDPOINT
+ if use_client_cert_env == "false":
+ expected_client_cert_source = None
+ expected_host = client.DEFAULT_ENDPOINT
+ else:
+ expected_client_cert_source = client_cert_source_callback
+ expected_host = client.DEFAULT_MTLS_ENDPOINT
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host=expected_host,
- scopes=None,
- ssl_channel_credentials=expected_ssl_channel_creds,
- quota_project_id=None,
- client_info=transports.base.DEFAULT_CLIENT_INFO,
- )
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=expected_host,
+ scopes=None,
+ client_cert_source_for_mtls=expected_client_cert_source,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
# Check the case ADC client cert is provided. Whether client cert is used depends on
# GOOGLE_API_USE_CLIENT_CERTIFICATE value.
@@ -318,66 +365,53 @@ def test_connection_service_client_mtls_env_auto(
):
with mock.patch.object(transport_class, "__init__") as patched:
with mock.patch(
- "google.auth.transport.grpc.SslCredentials.__init__", return_value=None
+ "google.auth.transport.mtls.has_default_client_cert_source",
+ return_value=True,
):
with mock.patch(
- "google.auth.transport.grpc.SslCredentials.is_mtls",
- new_callable=mock.PropertyMock,
- ) as is_mtls_mock:
- with mock.patch(
- "google.auth.transport.grpc.SslCredentials.ssl_credentials",
- new_callable=mock.PropertyMock,
- ) as ssl_credentials_mock:
- if use_client_cert_env == "false":
- is_mtls_mock.return_value = False
- ssl_credentials_mock.return_value = None
- expected_host = client.DEFAULT_ENDPOINT
- expected_ssl_channel_creds = None
- else:
- is_mtls_mock.return_value = True
- ssl_credentials_mock.return_value = mock.Mock()
- expected_host = client.DEFAULT_MTLS_ENDPOINT
- expected_ssl_channel_creds = (
- ssl_credentials_mock.return_value
- )
-
- patched.return_value = None
- client = client_class()
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host=expected_host,
- scopes=None,
- ssl_channel_credentials=expected_ssl_channel_creds,
- quota_project_id=None,
- client_info=transports.base.DEFAULT_CLIENT_INFO,
- )
+ "google.auth.transport.mtls.default_client_cert_source",
+ return_value=client_cert_source_callback,
+ ):
+ if use_client_cert_env == "false":
+ expected_host = client.DEFAULT_ENDPOINT
+ expected_client_cert_source = None
+ else:
+ expected_host = client.DEFAULT_MTLS_ENDPOINT
+ expected_client_cert_source = client_cert_source_callback
- # Check the case client_cert_source and ADC client cert are not provided.
- with mock.patch.dict(
- os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
- ):
- with mock.patch.object(transport_class, "__init__") as patched:
- with mock.patch(
- "google.auth.transport.grpc.SslCredentials.__init__", return_value=None
- ):
- with mock.patch(
- "google.auth.transport.grpc.SslCredentials.is_mtls",
- new_callable=mock.PropertyMock,
- ) as is_mtls_mock:
- is_mtls_mock.return_value = False
patched.return_value = None
client = client_class()
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
- host=client.DEFAULT_ENDPOINT,
+ host=expected_host,
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=expected_client_cert_source,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
+ # Check the case client_cert_source and ADC client cert are not provided.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+ ):
+ with mock.patch.object(transport_class, "__init__") as patched:
+ with mock.patch(
+ "google.auth.transport.mtls.has_default_client_cert_source",
+ return_value=False,
+ ):
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
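# Context for the reworked assertions above: the transport now receives the
# certificate callback (client_cert_source_for_mtls) instead of prebuilt
# ssl_channel_credentials, and ADC client certs are discovered through
# google.auth.transport.mtls rather than SslCredentials. Usage sketch
# (hypothetical loader; assumes GOOGLE_API_USE_CLIENT_CERTIFICATE=true and
# working default credentials).
def _sketch_mtls_client():
    def load_client_cert():
        # Hypothetical: return PEM-encoded (cert_bytes, key_bytes).
        return b"...cert...", b"...key..."

    return ConnectionServiceClient(
        client_options=client_options.ClientOptions(client_cert_source=load_client_cert),
    )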
@pytest.mark.parametrize(
"client_class,transport_class,transport_name",
@@ -403,7 +437,7 @@ def test_connection_service_client_client_options_scopes(
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=["1", "2"],
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -433,7 +467,7 @@ def test_connection_service_client_client_options_credentials_file(
credentials_file="credentials.json",
host=client.DEFAULT_ENDPOINT,
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -452,7 +486,7 @@ def test_connection_service_client_client_options_from_dict():
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -462,7 +496,7 @@ def test_create_connection(
transport: str = "grpc", request_type=gcbc_connection.CreateConnectionRequest
):
client = ConnectionServiceClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -485,29 +519,20 @@ def test_create_connection(
instance_id="instance_id_value"
),
)
-
response = client.create_connection(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
-
assert args[0] == gcbc_connection.CreateConnectionRequest()
# Establish that the response is the type that we expect.
-
assert isinstance(response, gcbc_connection.Connection)
-
assert response.name == "name_value"
-
assert response.friendly_name == "friendly_name_value"
-
assert response.description == "description_value"
-
assert response.creation_time == 1379
-
assert response.last_modified_time == 1890
-
assert response.has_credential is True
@@ -515,13 +540,30 @@ def test_create_connection_from_dict():
test_create_connection(request_type=dict)
+def test_create_connection_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = ConnectionServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_connection), "__call__"
+ ) as call:
+ client.create_connection()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == gcbc_connection.CreateConnectionRequest()
+
+
@pytest.mark.asyncio
async def test_create_connection_async(
transport: str = "grpc_asyncio",
request_type=gcbc_connection.CreateConnectionRequest,
):
client = ConnectionServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -543,28 +585,20 @@ async def test_create_connection_async(
has_credential=True,
)
)
-
response = await client.create_connection(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
-
assert args[0] == gcbc_connection.CreateConnectionRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, gcbc_connection.Connection)
-
assert response.name == "name_value"
-
assert response.friendly_name == "friendly_name_value"
-
assert response.description == "description_value"
-
assert response.creation_time == 1379
-
assert response.last_modified_time == 1890
-
assert response.has_credential is True
@@ -574,11 +608,12 @@ async def test_create_connection_async_from_dict():
def test_create_connection_field_headers():
- client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = ConnectionServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = gcbc_connection.CreateConnectionRequest()
+
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
@@ -586,7 +621,6 @@ def test_create_connection_field_headers():
type(client.transport.create_connection), "__call__"
) as call:
call.return_value = gcbc_connection.Connection()
-
client.create_connection(request)
# Establish that the underlying gRPC stub method was called.
@@ -602,12 +636,13 @@ def test_create_connection_field_headers():
@pytest.mark.asyncio
async def test_create_connection_field_headers_async():
client = ConnectionServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = gcbc_connection.CreateConnectionRequest()
+
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
@@ -617,7 +652,6 @@ async def test_create_connection_field_headers_async():
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
gcbc_connection.Connection()
)
-
await client.create_connection(request)
# Establish that the underlying gRPC stub method was called.
@@ -631,7 +665,7 @@ async def test_create_connection_field_headers_async():
def test_create_connection_flattened():
- client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = ConnectionServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
@@ -639,7 +673,6 @@ def test_create_connection_flattened():
) as call:
# Designate an appropriate return value for the call.
call.return_value = gcbc_connection.Connection()
-
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.create_connection(
@@ -652,16 +685,13 @@ def test_create_connection_flattened():
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
-
assert args[0].parent == "parent_value"
-
assert args[0].connection == gcbc_connection.Connection(name="name_value")
-
assert args[0].connection_id == "connection_id_value"
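# Usage sketch mirroring the flattened call above (hypothetical project,
# location, and connection values).
def _sketch_create_connection(client):
    return client.create_connection(
        parent="projects/my-project/locations/US",
        connection=gcbc_connection.Connection(friendly_name="orders-db"),
        connection_id="orders",
    )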
def test_create_connection_flattened_error():
- client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = ConnectionServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
@@ -677,7 +707,7 @@ def test_create_connection_flattened_error():
@pytest.mark.asyncio
async def test_create_connection_flattened_async():
client = ConnectionServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -702,18 +732,15 @@ async def test_create_connection_flattened_async():
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
-
assert args[0].parent == "parent_value"
-
assert args[0].connection == gcbc_connection.Connection(name="name_value")
-
assert args[0].connection_id == "connection_id_value"
@pytest.mark.asyncio
async def test_create_connection_flattened_error_async():
client = ConnectionServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
@@ -731,7 +758,7 @@ def test_get_connection(
transport: str = "grpc", request_type=connection.GetConnectionRequest
):
client = ConnectionServiceClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -750,29 +777,20 @@ def test_get_connection(
has_credential=True,
cloud_sql=connection.CloudSqlProperties(instance_id="instance_id_value"),
)
-
response = client.get_connection(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
-
assert args[0] == connection.GetConnectionRequest()
# Establish that the response is the type that we expect.
-
assert isinstance(response, connection.Connection)
-
assert response.name == "name_value"
-
assert response.friendly_name == "friendly_name_value"
-
assert response.description == "description_value"
-
assert response.creation_time == 1379
-
assert response.last_modified_time == 1890
-
assert response.has_credential is True
@@ -780,12 +798,27 @@ def test_get_connection_from_dict():
test_get_connection(request_type=dict)
+def test_get_connection_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = ConnectionServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_connection), "__call__") as call:
+ client.get_connection()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == connection.GetConnectionRequest()
+
+
@pytest.mark.asyncio
async def test_get_connection_async(
transport: str = "grpc_asyncio", request_type=connection.GetConnectionRequest
):
client = ConnectionServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -805,28 +838,20 @@ async def test_get_connection_async(
has_credential=True,
)
)
-
response = await client.get_connection(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
-
assert args[0] == connection.GetConnectionRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, connection.Connection)
-
assert response.name == "name_value"
-
assert response.friendly_name == "friendly_name_value"
-
assert response.description == "description_value"
-
assert response.creation_time == 1379
-
assert response.last_modified_time == 1890
-
assert response.has_credential is True
@@ -836,17 +861,17 @@ async def test_get_connection_async_from_dict():
def test_get_connection_field_headers():
- client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = ConnectionServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = connection.GetConnectionRequest()
+
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_connection), "__call__") as call:
call.return_value = connection.Connection()
-
client.get_connection(request)
# Establish that the underlying gRPC stub method was called.
@@ -862,12 +887,13 @@ def test_get_connection_field_headers():
@pytest.mark.asyncio
async def test_get_connection_field_headers_async():
client = ConnectionServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = connection.GetConnectionRequest()
+
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
@@ -875,7 +901,6 @@ async def test_get_connection_field_headers_async():
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
connection.Connection()
)
-
await client.get_connection(request)
# Establish that the underlying gRPC stub method was called.
@@ -889,13 +914,12 @@ async def test_get_connection_field_headers_async():
def test_get_connection_flattened():
- client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = ConnectionServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_connection), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = connection.Connection()
-
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.get_connection(name="name_value",)
@@ -904,12 +928,11 @@ def test_get_connection_flattened():
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
-
assert args[0].name == "name_value"
def test_get_connection_flattened_error():
- client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = ConnectionServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
@@ -922,7 +945,7 @@ def test_get_connection_flattened_error():
@pytest.mark.asyncio
async def test_get_connection_flattened_async():
client = ConnectionServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -941,14 +964,13 @@ async def test_get_connection_flattened_async():
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
-
assert args[0].name == "name_value"
@pytest.mark.asyncio
async def test_get_connection_flattened_error_async():
client = ConnectionServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
@@ -963,7 +985,7 @@ def test_list_connections(
transport: str = "grpc", request_type=connection.ListConnectionsRequest
):
client = ConnectionServiceClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -976,19 +998,15 @@ def test_list_connections(
call.return_value = connection.ListConnectionsResponse(
next_page_token="next_page_token_value",
)
-
response = client.list_connections(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
-
assert args[0] == connection.ListConnectionsRequest()
# Establish that the response is the type that we expect.
-
assert isinstance(response, pagers.ListConnectionsPager)
-
assert response.next_page_token == "next_page_token_value"
@@ -996,12 +1014,27 @@ def test_list_connections_from_dict():
test_list_connections(request_type=dict)
+def test_list_connections_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = ConnectionServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_connections), "__call__") as call:
+ client.list_connections()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == connection.ListConnectionsRequest()
+
+
@pytest.mark.asyncio
async def test_list_connections_async(
transport: str = "grpc_asyncio", request_type=connection.ListConnectionsRequest
):
client = ConnectionServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -1014,18 +1047,15 @@ async def test_list_connections_async(
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
connection.ListConnectionsResponse(next_page_token="next_page_token_value",)
)
-
response = await client.list_connections(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
-
assert args[0] == connection.ListConnectionsRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListConnectionsAsyncPager)
-
assert response.next_page_token == "next_page_token_value"
@@ -1035,17 +1065,17 @@ async def test_list_connections_async_from_dict():
def test_list_connections_field_headers():
- client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = ConnectionServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = connection.ListConnectionsRequest()
+
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_connections), "__call__") as call:
call.return_value = connection.ListConnectionsResponse()
-
client.list_connections(request)
# Establish that the underlying gRPC stub method was called.
@@ -1061,12 +1091,13 @@ def test_list_connections_field_headers():
@pytest.mark.asyncio
async def test_list_connections_field_headers_async():
client = ConnectionServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = connection.ListConnectionsRequest()
+
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
@@ -1074,7 +1105,6 @@ async def test_list_connections_field_headers_async():
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
connection.ListConnectionsResponse()
)
-
await client.list_connections(request)
# Establish that the underlying gRPC stub method was called.
@@ -1088,13 +1118,12 @@ async def test_list_connections_field_headers_async():
def test_list_connections_flattened():
- client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = ConnectionServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_connections), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = connection.ListConnectionsResponse()
-
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.list_connections(parent="parent_value",)
@@ -1103,12 +1132,11 @@ def test_list_connections_flattened():
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
-
assert args[0].parent == "parent_value"
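# Usage sketch for the flattened call above; the returned ListConnectionsPager
# fetches subsequent pages transparently (hypothetical parent).
def _sketch_list_connection_names(client):
    parent = "projects/my-project/locations/US"
    return [conn.name for conn in client.list_connections(parent=parent)]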
def test_list_connections_flattened_error():
- client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = ConnectionServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
@@ -1121,7 +1149,7 @@ def test_list_connections_flattened_error():
@pytest.mark.asyncio
async def test_list_connections_flattened_async():
client = ConnectionServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -1140,14 +1168,13 @@ async def test_list_connections_flattened_async():
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
-
assert args[0].parent == "parent_value"
@pytest.mark.asyncio
async def test_list_connections_flattened_error_async():
client = ConnectionServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
@@ -1159,7 +1186,7 @@ async def test_list_connections_flattened_error_async():
def test_list_connections_pager():
- client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials,)
+ client = ConnectionServiceClient(credentials=ga_credentials.AnonymousCredentials,)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_connections), "__call__") as call:
@@ -1197,7 +1224,7 @@ def test_list_connections_pager():
def test_list_connections_pages():
- client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials,)
+ client = ConnectionServiceClient(credentials=ga_credentials.AnonymousCredentials,)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_connections), "__call__") as call:
@@ -1227,7 +1254,9 @@ def test_list_connections_pages():
@pytest.mark.asyncio
async def test_list_connections_async_pager():
- client = ConnectionServiceAsyncClient(credentials=credentials.AnonymousCredentials,)
+ client = ConnectionServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials,
+ )
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
@@ -1264,7 +1293,9 @@ async def test_list_connections_async_pager():
@pytest.mark.asyncio
async def test_list_connections_async_pages():
- client = ConnectionServiceAsyncClient(credentials=credentials.AnonymousCredentials,)
+ client = ConnectionServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials,
+ )
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
@@ -1300,7 +1331,7 @@ def test_update_connection(
transport: str = "grpc", request_type=gcbc_connection.UpdateConnectionRequest
):
client = ConnectionServiceClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -1323,29 +1354,20 @@ def test_update_connection(
instance_id="instance_id_value"
),
)
-
response = client.update_connection(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
-
assert args[0] == gcbc_connection.UpdateConnectionRequest()
# Establish that the response is the type that we expect.
-
assert isinstance(response, gcbc_connection.Connection)
-
assert response.name == "name_value"
-
assert response.friendly_name == "friendly_name_value"
-
assert response.description == "description_value"
-
assert response.creation_time == 1379
-
assert response.last_modified_time == 1890
-
assert response.has_credential is True
@@ -1353,13 +1375,30 @@ def test_update_connection_from_dict():
test_update_connection(request_type=dict)
+def test_update_connection_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = ConnectionServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.update_connection), "__call__"
+ ) as call:
+ client.update_connection()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == gcbc_connection.UpdateConnectionRequest()
+
+
@pytest.mark.asyncio
async def test_update_connection_async(
transport: str = "grpc_asyncio",
request_type=gcbc_connection.UpdateConnectionRequest,
):
client = ConnectionServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -1381,28 +1420,20 @@ async def test_update_connection_async(
has_credential=True,
)
)
-
response = await client.update_connection(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
-
assert args[0] == gcbc_connection.UpdateConnectionRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, gcbc_connection.Connection)
-
assert response.name == "name_value"
-
assert response.friendly_name == "friendly_name_value"
-
assert response.description == "description_value"
-
assert response.creation_time == 1379
-
assert response.last_modified_time == 1890
-
assert response.has_credential is True
@@ -1412,11 +1443,12 @@ async def test_update_connection_async_from_dict():
def test_update_connection_field_headers():
- client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = ConnectionServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = gcbc_connection.UpdateConnectionRequest()
+
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
@@ -1424,7 +1456,6 @@ def test_update_connection_field_headers():
type(client.transport.update_connection), "__call__"
) as call:
call.return_value = gcbc_connection.Connection()
-
client.update_connection(request)
# Establish that the underlying gRPC stub method was called.
@@ -1440,12 +1471,13 @@ def test_update_connection_field_headers():
@pytest.mark.asyncio
async def test_update_connection_field_headers_async():
client = ConnectionServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = gcbc_connection.UpdateConnectionRequest()
+
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
@@ -1455,7 +1487,6 @@ async def test_update_connection_field_headers_async():
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
gcbc_connection.Connection()
)
-
await client.update_connection(request)
# Establish that the underlying gRPC stub method was called.
@@ -1469,7 +1500,7 @@ async def test_update_connection_field_headers_async():
def test_update_connection_flattened():
- client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = ConnectionServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
@@ -1477,29 +1508,25 @@ def test_update_connection_flattened():
) as call:
# Designate an appropriate return value for the call.
call.return_value = gcbc_connection.Connection()
-
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.update_connection(
name="name_value",
connection=gcbc_connection.Connection(name="name_value"),
- update_mask=field_mask.FieldMask(paths=["paths_value"]),
+ update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
-
assert args[0].name == "name_value"
-
assert args[0].connection == gcbc_connection.Connection(name="name_value")
-
- assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"])
+ assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"])
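# Usage sketch for the flattened update above: only the fields named in the
# FieldMask are written (hypothetical connection name and new value).
def _sketch_rename_connection(client):
    return client.update_connection(
        name="projects/my-project/locations/US/connections/orders",
        connection=gcbc_connection.Connection(friendly_name="orders-db (prod)"),
        update_mask=field_mask_pb2.FieldMask(paths=["friendly_name"]),
    )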
def test_update_connection_flattened_error():
- client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = ConnectionServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
@@ -1508,14 +1535,14 @@ def test_update_connection_flattened_error():
gcbc_connection.UpdateConnectionRequest(),
name="name_value",
connection=gcbc_connection.Connection(name="name_value"),
- update_mask=field_mask.FieldMask(paths=["paths_value"]),
+ update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
)
@pytest.mark.asyncio
async def test_update_connection_flattened_async():
client = ConnectionServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -1533,25 +1560,22 @@ async def test_update_connection_flattened_async():
response = await client.update_connection(
name="name_value",
connection=gcbc_connection.Connection(name="name_value"),
- update_mask=field_mask.FieldMask(paths=["paths_value"]),
+ update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
-
assert args[0].name == "name_value"
-
assert args[0].connection == gcbc_connection.Connection(name="name_value")
-
- assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"])
+ assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"])
@pytest.mark.asyncio
async def test_update_connection_flattened_error_async():
client = ConnectionServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
@@ -1561,7 +1585,7 @@ async def test_update_connection_flattened_error_async():
gcbc_connection.UpdateConnectionRequest(),
name="name_value",
connection=gcbc_connection.Connection(name="name_value"),
- update_mask=field_mask.FieldMask(paths=["paths_value"]),
+ update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
)
@@ -1569,7 +1593,7 @@ def test_delete_connection(
transport: str = "grpc", request_type=connection.DeleteConnectionRequest
):
client = ConnectionServiceClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -1582,13 +1606,11 @@ def test_delete_connection(
) as call:
# Designate an appropriate return value for the call.
call.return_value = None
-
response = client.delete_connection(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
-
assert args[0] == connection.DeleteConnectionRequest()
# Establish that the response is the type that we expect.
@@ -1599,12 +1621,29 @@ def test_delete_connection_from_dict():
test_delete_connection(request_type=dict)
+def test_delete_connection_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = ConnectionServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_connection), "__call__"
+ ) as call:
+ client.delete_connection()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == connection.DeleteConnectionRequest()
+
+
@pytest.mark.asyncio
async def test_delete_connection_async(
transport: str = "grpc_asyncio", request_type=connection.DeleteConnectionRequest
):
client = ConnectionServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -1617,13 +1656,11 @@ async def test_delete_connection_async(
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-
response = await client.delete_connection(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
-
assert args[0] == connection.DeleteConnectionRequest()
# Establish that the response is the type that we expect.
@@ -1636,11 +1673,12 @@ async def test_delete_connection_async_from_dict():
def test_delete_connection_field_headers():
- client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = ConnectionServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = connection.DeleteConnectionRequest()
+
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
@@ -1648,7 +1686,6 @@ def test_delete_connection_field_headers():
type(client.transport.delete_connection), "__call__"
) as call:
call.return_value = None
-
client.delete_connection(request)
# Establish that the underlying gRPC stub method was called.
@@ -1664,12 +1701,13 @@ def test_delete_connection_field_headers():
@pytest.mark.asyncio
async def test_delete_connection_field_headers_async():
client = ConnectionServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = connection.DeleteConnectionRequest()
+
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
@@ -1677,7 +1715,6 @@ async def test_delete_connection_field_headers_async():
type(client.transport.delete_connection), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-
await client.delete_connection(request)
# Establish that the underlying gRPC stub method was called.
@@ -1691,7 +1728,7 @@ async def test_delete_connection_field_headers_async():
def test_delete_connection_flattened():
- client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = ConnectionServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
@@ -1699,7 +1736,6 @@ def test_delete_connection_flattened():
) as call:
# Designate an appropriate return value for the call.
call.return_value = None
-
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.delete_connection(name="name_value",)
@@ -1708,12 +1744,11 @@ def test_delete_connection_flattened():
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
-
assert args[0].name == "name_value"
def test_delete_connection_flattened_error():
- client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = ConnectionServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
@@ -1726,7 +1761,7 @@ def test_delete_connection_flattened_error():
@pytest.mark.asyncio
async def test_delete_connection_flattened_async():
client = ConnectionServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -1745,14 +1780,13 @@ async def test_delete_connection_flattened_async():
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
-
assert args[0].name == "name_value"
@pytest.mark.asyncio
async def test_delete_connection_flattened_error_async():
client = ConnectionServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
@@ -1764,10 +1798,10 @@ async def test_delete_connection_flattened_error_async():
def test_get_iam_policy(
- transport: str = "grpc", request_type=iam_policy.GetIamPolicyRequest
+ transport: str = "grpc", request_type=iam_policy_pb2.GetIamPolicyRequest
):
client = ConnectionServiceClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -1777,22 +1811,17 @@ def test_get_iam_policy(
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
- call.return_value = policy.Policy(version=774, etag=b"etag_blob",)
-
+ call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",)
response = client.get_iam_policy(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
-
- assert args[0] == iam_policy.GetIamPolicyRequest()
+ assert args[0] == iam_policy_pb2.GetIamPolicyRequest()
# Establish that the response is the type that we expect.
-
- assert isinstance(response, policy.Policy)
-
+ assert isinstance(response, policy_pb2.Policy)
assert response.version == 774
-
assert response.etag == b"etag_blob"
@@ -1800,12 +1829,27 @@ def test_get_iam_policy_from_dict():
test_get_iam_policy(request_type=dict)
+def test_get_iam_policy_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = ConnectionServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call:
+ client.get_iam_policy()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == iam_policy_pb2.GetIamPolicyRequest()
+
+
@pytest.mark.asyncio
async def test_get_iam_policy_async(
- transport: str = "grpc_asyncio", request_type=iam_policy.GetIamPolicyRequest
+ transport: str = "grpc_asyncio", request_type=iam_policy_pb2.GetIamPolicyRequest
):
client = ConnectionServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -1816,22 +1860,18 @@ async def test_get_iam_policy_async(
with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- policy.Policy(version=774, etag=b"etag_blob",)
+ policy_pb2.Policy(version=774, etag=b"etag_blob",)
)
-
response = await client.get_iam_policy(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
-
- assert args[0] == iam_policy.GetIamPolicyRequest()
+ assert args[0] == iam_policy_pb2.GetIamPolicyRequest()
# Establish that the response is the type that we expect.
- assert isinstance(response, policy.Policy)
-
+ assert isinstance(response, policy_pb2.Policy)
assert response.version == 774
-
assert response.etag == b"etag_blob"
@@ -1841,17 +1881,17 @@ async def test_get_iam_policy_async_from_dict():
def test_get_iam_policy_field_headers():
- client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = ConnectionServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
- request = iam_policy.GetIamPolicyRequest()
+ request = iam_policy_pb2.GetIamPolicyRequest()
+
request.resource = "resource/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call:
- call.return_value = policy.Policy()
-
+ call.return_value = policy_pb2.Policy()
client.get_iam_policy(request)
# Establish that the underlying gRPC stub method was called.
@@ -1867,18 +1907,18 @@ def test_get_iam_policy_field_headers():
@pytest.mark.asyncio
async def test_get_iam_policy_field_headers_async():
client = ConnectionServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
- request = iam_policy.GetIamPolicyRequest()
+ request = iam_policy_pb2.GetIamPolicyRequest()
+
request.resource = "resource/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy())
-
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
await client.get_iam_policy(request)
# Establish that the underlying gRPC stub method was called.
@@ -1892,29 +1932,27 @@ async def test_get_iam_policy_field_headers_async():
def test_get_iam_policy_from_dict_foreign():
- client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = ConnectionServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
- call.return_value = policy.Policy()
-
+ call.return_value = policy_pb2.Policy()
response = client.get_iam_policy(
request={
"resource": "resource_value",
- "options": options.GetPolicyOptions(requested_policy_version=2598),
+ "options": options_pb2.GetPolicyOptions(requested_policy_version=2598),
}
)
call.assert_called()
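# Usage sketch for the dict-form request above (hypothetical resource name;
# requested_policy_version=3 opts in to conditional role bindings).
def _sketch_get_policy(client, resource="projects/my-project/locations/US/connections/orders"):
    return client.get_iam_policy(
        request=iam_policy_pb2.GetIamPolicyRequest(
            resource=resource,
            options=options_pb2.GetPolicyOptions(requested_policy_version=3),
        )
    )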
def test_get_iam_policy_flattened():
- client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = ConnectionServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
- call.return_value = policy.Policy()
-
+ call.return_value = policy_pb2.Policy()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.get_iam_policy(resource="resource_value",)
@@ -1923,33 +1961,32 @@ def test_get_iam_policy_flattened():
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
-
assert args[0].resource == "resource_value"
def test_get_iam_policy_flattened_error():
- client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = ConnectionServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.get_iam_policy(
- iam_policy.GetIamPolicyRequest(), resource="resource_value",
+ iam_policy_pb2.GetIamPolicyRequest(), resource="resource_value",
)
@pytest.mark.asyncio
async def test_get_iam_policy_flattened_async():
client = ConnectionServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
- call.return_value = policy.Policy()
+ call.return_value = policy_pb2.Policy()
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy())
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.get_iam_policy(resource="resource_value",)
@@ -1958,29 +1995,28 @@ async def test_get_iam_policy_flattened_async():
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
-
assert args[0].resource == "resource_value"
@pytest.mark.asyncio
async def test_get_iam_policy_flattened_error_async():
client = ConnectionServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.get_iam_policy(
- iam_policy.GetIamPolicyRequest(), resource="resource_value",
+ iam_policy_pb2.GetIamPolicyRequest(), resource="resource_value",
)
def test_set_iam_policy(
- transport: str = "grpc", request_type=iam_policy.SetIamPolicyRequest
+ transport: str = "grpc", request_type=iam_policy_pb2.SetIamPolicyRequest
):
client = ConnectionServiceClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -1990,22 +2026,17 @@ def test_set_iam_policy(
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
- call.return_value = policy.Policy(version=774, etag=b"etag_blob",)
-
+ call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",)
response = client.set_iam_policy(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
-
- assert args[0] == iam_policy.SetIamPolicyRequest()
+ assert args[0] == iam_policy_pb2.SetIamPolicyRequest()
# Establish that the response is the type that we expect.
-
- assert isinstance(response, policy.Policy)
-
+ assert isinstance(response, policy_pb2.Policy)
assert response.version == 774
-
assert response.etag == b"etag_blob"
@@ -2013,12 +2044,27 @@ def test_set_iam_policy_from_dict():
test_set_iam_policy(request_type=dict)
+def test_set_iam_policy_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = ConnectionServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+ client.set_iam_policy()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == iam_policy_pb2.SetIamPolicyRequest()
+
+
@pytest.mark.asyncio
async def test_set_iam_policy_async(
- transport: str = "grpc_asyncio", request_type=iam_policy.SetIamPolicyRequest
+ transport: str = "grpc_asyncio", request_type=iam_policy_pb2.SetIamPolicyRequest
):
client = ConnectionServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -2029,22 +2075,18 @@ async def test_set_iam_policy_async(
with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- policy.Policy(version=774, etag=b"etag_blob",)
+ policy_pb2.Policy(version=774, etag=b"etag_blob",)
)
-
response = await client.set_iam_policy(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
-
- assert args[0] == iam_policy.SetIamPolicyRequest()
+ assert args[0] == iam_policy_pb2.SetIamPolicyRequest()
# Establish that the response is the type that we expect.
- assert isinstance(response, policy.Policy)
-
+ assert isinstance(response, policy_pb2.Policy)
assert response.version == 774
-
assert response.etag == b"etag_blob"
@@ -2054,17 +2096,17 @@ async def test_set_iam_policy_async_from_dict():
def test_set_iam_policy_field_headers():
- client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = ConnectionServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
- request = iam_policy.SetIamPolicyRequest()
+ request = iam_policy_pb2.SetIamPolicyRequest()
+
request.resource = "resource/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
- call.return_value = policy.Policy()
-
+ call.return_value = policy_pb2.Policy()
client.set_iam_policy(request)
# Establish that the underlying gRPC stub method was called.
@@ -2080,18 +2122,18 @@ def test_set_iam_policy_field_headers():
@pytest.mark.asyncio
async def test_set_iam_policy_field_headers_async():
client = ConnectionServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
- request = iam_policy.SetIamPolicyRequest()
+ request = iam_policy_pb2.SetIamPolicyRequest()
+
request.resource = "resource/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy())
-
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
await client.set_iam_policy(request)
# Establish that the underlying gRPC stub method was called.
@@ -2105,29 +2147,27 @@ async def test_set_iam_policy_field_headers_async():
def test_set_iam_policy_from_dict_foreign():
- client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = ConnectionServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
- call.return_value = policy.Policy()
-
+ call.return_value = policy_pb2.Policy()
response = client.set_iam_policy(
request={
"resource": "resource_value",
- "policy": policy.Policy(version=774),
+ "policy": policy_pb2.Policy(version=774),
}
)
call.assert_called()
def test_set_iam_policy_flattened():
- client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = ConnectionServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
- call.return_value = policy.Policy()
-
+ call.return_value = policy_pb2.Policy()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.set_iam_policy(resource="resource_value",)
@@ -2136,33 +2176,32 @@ def test_set_iam_policy_flattened():
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
-
assert args[0].resource == "resource_value"
def test_set_iam_policy_flattened_error():
- client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = ConnectionServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.set_iam_policy(
- iam_policy.SetIamPolicyRequest(), resource="resource_value",
+ iam_policy_pb2.SetIamPolicyRequest(), resource="resource_value",
)
@pytest.mark.asyncio
async def test_set_iam_policy_flattened_async():
client = ConnectionServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
- call.return_value = policy.Policy()
+ call.return_value = policy_pb2.Policy()
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy())
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.set_iam_policy(resource="resource_value",)
@@ -2171,29 +2210,28 @@ async def test_set_iam_policy_flattened_async():
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
-
assert args[0].resource == "resource_value"
@pytest.mark.asyncio
async def test_set_iam_policy_flattened_error_async():
client = ConnectionServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.set_iam_policy(
- iam_policy.SetIamPolicyRequest(), resource="resource_value",
+ iam_policy_pb2.SetIamPolicyRequest(), resource="resource_value",
)
def test_test_iam_permissions(
- transport: str = "grpc", request_type=iam_policy.TestIamPermissionsRequest
+ transport: str = "grpc", request_type=iam_policy_pb2.TestIamPermissionsRequest
):
client = ConnectionServiceClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -2205,22 +2243,18 @@ def test_test_iam_permissions(
type(client.transport.test_iam_permissions), "__call__"
) as call:
# Designate an appropriate return value for the call.
- call.return_value = iam_policy.TestIamPermissionsResponse(
+ call.return_value = iam_policy_pb2.TestIamPermissionsResponse(
permissions=["permissions_value"],
)
-
response = client.test_iam_permissions(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
-
- assert args[0] == iam_policy.TestIamPermissionsRequest()
+ assert args[0] == iam_policy_pb2.TestIamPermissionsRequest()
# Establish that the response is the type that we expect.
-
- assert isinstance(response, iam_policy.TestIamPermissionsResponse)
-
+ assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse)
assert response.permissions == ["permissions_value"]
@@ -2228,12 +2262,30 @@ def test_test_iam_permissions_from_dict():
test_test_iam_permissions(request_type=dict)
+def test_test_iam_permissions_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = ConnectionServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.test_iam_permissions), "__call__"
+ ) as call:
+ client.test_iam_permissions()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == iam_policy_pb2.TestIamPermissionsRequest()
+
+
@pytest.mark.asyncio
async def test_test_iam_permissions_async(
- transport: str = "grpc_asyncio", request_type=iam_policy.TestIamPermissionsRequest
+ transport: str = "grpc_asyncio",
+ request_type=iam_policy_pb2.TestIamPermissionsRequest,
):
client = ConnectionServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -2246,20 +2298,19 @@ async def test_test_iam_permissions_async(
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- iam_policy.TestIamPermissionsResponse(permissions=["permissions_value"],)
+ iam_policy_pb2.TestIamPermissionsResponse(
+ permissions=["permissions_value"],
+ )
)
-
response = await client.test_iam_permissions(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
-
- assert args[0] == iam_policy.TestIamPermissionsRequest()
+ assert args[0] == iam_policy_pb2.TestIamPermissionsRequest()
# Establish that the response is the type that we expect.
- assert isinstance(response, iam_policy.TestIamPermissionsResponse)
-
+ assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse)
assert response.permissions == ["permissions_value"]
@@ -2269,19 +2320,19 @@ async def test_test_iam_permissions_async_from_dict():
def test_test_iam_permissions_field_headers():
- client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = ConnectionServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
- request = iam_policy.TestIamPermissionsRequest()
+ request = iam_policy_pb2.TestIamPermissionsRequest()
+
request.resource = "resource/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.test_iam_permissions), "__call__"
) as call:
- call.return_value = iam_policy.TestIamPermissionsResponse()
-
+ call.return_value = iam_policy_pb2.TestIamPermissionsResponse()
client.test_iam_permissions(request)
# Establish that the underlying gRPC stub method was called.
@@ -2297,12 +2348,13 @@ def test_test_iam_permissions_field_headers():
@pytest.mark.asyncio
async def test_test_iam_permissions_field_headers_async():
client = ConnectionServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
- request = iam_policy.TestIamPermissionsRequest()
+ request = iam_policy_pb2.TestIamPermissionsRequest()
+
request.resource = "resource/value"
# Mock the actual call within the gRPC stub, and fake the request.
@@ -2310,9 +2362,8 @@ async def test_test_iam_permissions_field_headers_async():
type(client.transport.test_iam_permissions), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- iam_policy.TestIamPermissionsResponse()
+ iam_policy_pb2.TestIamPermissionsResponse()
)
-
await client.test_iam_permissions(request)
# Establish that the underlying gRPC stub method was called.
@@ -2326,14 +2377,13 @@ async def test_test_iam_permissions_field_headers_async():
def test_test_iam_permissions_from_dict_foreign():
- client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = ConnectionServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.test_iam_permissions), "__call__"
) as call:
# Designate an appropriate return value for the call.
- call.return_value = iam_policy.TestIamPermissionsResponse()
-
+ call.return_value = iam_policy_pb2.TestIamPermissionsResponse()
response = client.test_iam_permissions(
request={
"resource": "resource_value",
@@ -2344,15 +2394,14 @@ def test_test_iam_permissions_from_dict_foreign():
def test_test_iam_permissions_flattened():
- client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = ConnectionServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.test_iam_permissions), "__call__"
) as call:
# Designate an appropriate return value for the call.
- call.return_value = iam_policy.TestIamPermissionsResponse()
-
+ call.return_value = iam_policy_pb2.TestIamPermissionsResponse()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.test_iam_permissions(
@@ -2363,20 +2412,18 @@ def test_test_iam_permissions_flattened():
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
-
assert args[0].resource == "resource_value"
-
assert args[0].permissions == ["permissions_value"]
def test_test_iam_permissions_flattened_error():
- client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = ConnectionServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.test_iam_permissions(
- iam_policy.TestIamPermissionsRequest(),
+ iam_policy_pb2.TestIamPermissionsRequest(),
resource="resource_value",
permissions=["permissions_value"],
)
@@ -2385,7 +2432,7 @@ def test_test_iam_permissions_flattened_error():
@pytest.mark.asyncio
async def test_test_iam_permissions_flattened_async():
client = ConnectionServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -2393,10 +2440,10 @@ async def test_test_iam_permissions_flattened_async():
type(client.transport.test_iam_permissions), "__call__"
) as call:
# Designate an appropriate return value for the call.
- call.return_value = iam_policy.TestIamPermissionsResponse()
+ call.return_value = iam_policy_pb2.TestIamPermissionsResponse()
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- iam_policy.TestIamPermissionsResponse()
+ iam_policy_pb2.TestIamPermissionsResponse()
)
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
@@ -2408,23 +2455,21 @@ async def test_test_iam_permissions_flattened_async():
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
-
assert args[0].resource == "resource_value"
-
assert args[0].permissions == ["permissions_value"]
@pytest.mark.asyncio
async def test_test_iam_permissions_flattened_error_async():
client = ConnectionServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.test_iam_permissions(
- iam_policy.TestIamPermissionsRequest(),
+ iam_policy_pb2.TestIamPermissionsRequest(),
resource="resource_value",
permissions=["permissions_value"],
)
@@ -2433,16 +2478,16 @@ async def test_test_iam_permissions_flattened_error_async():
def test_credentials_transport_error():
# It is an error to provide credentials and a transport instance.
transport = transports.ConnectionServiceGrpcTransport(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
with pytest.raises(ValueError):
client = ConnectionServiceClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# It is an error to provide a credentials file and a transport instance.
transport = transports.ConnectionServiceGrpcTransport(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
with pytest.raises(ValueError):
client = ConnectionServiceClient(
@@ -2452,7 +2497,7 @@ def test_credentials_transport_error():
# It is an error to provide scopes and a transport instance.
transport = transports.ConnectionServiceGrpcTransport(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
with pytest.raises(ValueError):
client = ConnectionServiceClient(
@@ -2463,7 +2508,7 @@ def test_credentials_transport_error():
def test_transport_instance():
# A client may be instantiated with a custom transport instance.
transport = transports.ConnectionServiceGrpcTransport(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
client = ConnectionServiceClient(transport=transport)
assert client.transport is transport
@@ -2472,13 +2517,13 @@ def test_transport_instance():
def test_transport_get_channel():
# A client may be instantiated with a custom transport instance.
transport = transports.ConnectionServiceGrpcTransport(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
channel = transport.grpc_channel
assert channel
transport = transports.ConnectionServiceGrpcAsyncIOTransport(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
channel = transport.grpc_channel
assert channel
@@ -2493,23 +2538,23 @@ def test_transport_get_channel():
)
def test_transport_adc(transport_class):
# Test default credentials are used if not provided.
- with mock.patch.object(auth, "default") as adc:
- adc.return_value = (credentials.AnonymousCredentials(), None)
+ with mock.patch.object(google.auth, "default") as adc:
+ adc.return_value = (ga_credentials.AnonymousCredentials(), None)
transport_class()
adc.assert_called_once()
def test_transport_grpc_default():
# A client should use the gRPC transport by default.
- client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = ConnectionServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
assert isinstance(client.transport, transports.ConnectionServiceGrpcTransport,)
def test_connection_service_base_transport_error():
# Passing both a credentials object and credentials_file should raise an error
- with pytest.raises(exceptions.DuplicateCredentialArgs):
+ with pytest.raises(core_exceptions.DuplicateCredentialArgs):
transport = transports.ConnectionServiceTransport(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
credentials_file="credentials.json",
)
@@ -2521,7 +2566,7 @@ def test_connection_service_base_transport():
) as Transport:
Transport.return_value = None
transport = transports.ConnectionServiceTransport(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Every method on the transport should just blindly
@@ -2541,15 +2586,40 @@ def test_connection_service_base_transport():
getattr(transport, method)(request=object())
+@requires_google_auth_gte_1_25_0
def test_connection_service_base_transport_with_credentials_file():
# Instantiate the base transport with a credentials file
with mock.patch.object(
- auth, "load_credentials_from_file"
+ google.auth, "load_credentials_from_file", autospec=True
+ ) as load_creds, mock.patch(
+ "google.cloud.bigquery_connection_v1.services.connection_service.transports.ConnectionServiceTransport._prep_wrapped_messages"
+ ) as Transport:
+ Transport.return_value = None
+ load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
+ transport = transports.ConnectionServiceTransport(
+ credentials_file="credentials.json", quota_project_id="octopus",
+ )
+ load_creds.assert_called_once_with(
+ "credentials.json",
+ scopes=None,
+ default_scopes=(
+ "https://www.googleapis.com/auth/bigquery",
+ "https://www.googleapis.com/auth/cloud-platform",
+ ),
+ quota_project_id="octopus",
+ )
+
+
+@requires_google_auth_lt_1_25_0
+def test_connection_service_base_transport_with_credentials_file_old_google_auth():
+ # Instantiate the base transport with a credentials file
+ with mock.patch.object(
+ google.auth, "load_credentials_from_file", autospec=True
) as load_creds, mock.patch(
"google.cloud.bigquery_connection_v1.services.connection_service.transports.ConnectionServiceTransport._prep_wrapped_messages"
) as Transport:
Transport.return_value = None
- load_creds.return_value = (credentials.AnonymousCredentials(), None)
+ load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
transport = transports.ConnectionServiceTransport(
credentials_file="credentials.json", quota_project_id="octopus",
)
@@ -2565,19 +2635,36 @@ def test_connection_service_base_transport_with_credentials_file():
def test_connection_service_base_transport_with_adc():
# Test the default credentials are used if credentials and credentials_file are None.
- with mock.patch.object(auth, "default") as adc, mock.patch(
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch(
"google.cloud.bigquery_connection_v1.services.connection_service.transports.ConnectionServiceTransport._prep_wrapped_messages"
) as Transport:
Transport.return_value = None
- adc.return_value = (credentials.AnonymousCredentials(), None)
+ adc.return_value = (ga_credentials.AnonymousCredentials(), None)
transport = transports.ConnectionServiceTransport()
adc.assert_called_once()
+@requires_google_auth_gte_1_25_0
def test_connection_service_auth_adc():
# If no credentials are provided, we should use ADC credentials.
- with mock.patch.object(auth, "default") as adc:
- adc.return_value = (credentials.AnonymousCredentials(), None)
+ with mock.patch.object(google.auth, "default", autospec=True) as adc:
+ adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+ ConnectionServiceClient()
+ adc.assert_called_once_with(
+ scopes=None,
+ default_scopes=(
+ "https://www.googleapis.com/auth/bigquery",
+ "https://www.googleapis.com/auth/cloud-platform",
+ ),
+ quota_project_id=None,
+ )
+
+
+@requires_google_auth_lt_1_25_0
+def test_connection_service_auth_adc_old_google_auth():
+ # If no credentials are provided, we should use ADC credentials.
+ with mock.patch.object(google.auth, "default", autospec=True) as adc:
+ adc.return_value = (ga_credentials.AnonymousCredentials(), None)
ConnectionServiceClient()
adc.assert_called_once_with(
scopes=(
@@ -2588,14 +2675,44 @@ def test_connection_service_auth_adc():
)
-def test_connection_service_transport_auth_adc():
+@pytest.mark.parametrize(
+ "transport_class",
+ [
+ transports.ConnectionServiceGrpcTransport,
+ transports.ConnectionServiceGrpcAsyncIOTransport,
+ ],
+)
+@requires_google_auth_gte_1_25_0
+def test_connection_service_transport_auth_adc(transport_class):
# If credentials and host are not provided, the transport class should use
# ADC credentials.
- with mock.patch.object(auth, "default") as adc:
- adc.return_value = (credentials.AnonymousCredentials(), None)
- transports.ConnectionServiceGrpcTransport(
- host="squid.clam.whelk", quota_project_id="octopus"
+ with mock.patch.object(google.auth, "default", autospec=True) as adc:
+ adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+ transport_class(quota_project_id="octopus", scopes=["1", "2"])
+ adc.assert_called_once_with(
+ scopes=["1", "2"],
+ default_scopes=(
+ "https://www.googleapis.com/auth/bigquery",
+ "https://www.googleapis.com/auth/cloud-platform",
+ ),
+ quota_project_id="octopus",
)
+
+
+@pytest.mark.parametrize(
+ "transport_class",
+ [
+ transports.ConnectionServiceGrpcTransport,
+ transports.ConnectionServiceGrpcAsyncIOTransport,
+ ],
+)
+@requires_google_auth_lt_1_25_0
+def test_connection_service_transport_auth_adc_old_google_auth(transport_class):
+ # If credentials and host are not provided, the transport class should use
+ # ADC credentials.
+ with mock.patch.object(google.auth, "default", autospec=True) as adc:
+ adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+ transport_class(quota_project_id="octopus")
adc.assert_called_once_with(
scopes=(
"https://www.googleapis.com/auth/bigquery",
@@ -2605,9 +2722,95 @@ def test_connection_service_transport_auth_adc():
)
+@pytest.mark.parametrize(
+ "transport_class,grpc_helpers",
+ [
+ (transports.ConnectionServiceGrpcTransport, grpc_helpers),
+ (transports.ConnectionServiceGrpcAsyncIOTransport, grpc_helpers_async),
+ ],
+)
+def test_connection_service_transport_create_channel(transport_class, grpc_helpers):
+ # If credentials and host are not provided, the transport class should use
+ # ADC credentials.
+ with mock.patch.object(
+ google.auth, "default", autospec=True
+ ) as adc, mock.patch.object(
+ grpc_helpers, "create_channel", autospec=True
+ ) as create_channel:
+ creds = ga_credentials.AnonymousCredentials()
+ adc.return_value = (creds, None)
+ transport_class(quota_project_id="octopus", scopes=["1", "2"])
+
+ create_channel.assert_called_with(
+ "bigqueryconnection.googleapis.com:443",
+ credentials=creds,
+ credentials_file=None,
+ quota_project_id="octopus",
+ default_scopes=(
+ "https://www.googleapis.com/auth/bigquery",
+ "https://www.googleapis.com/auth/cloud-platform",
+ ),
+ scopes=["1", "2"],
+ default_host="bigqueryconnection.googleapis.com",
+ ssl_credentials=None,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+
+
+@pytest.mark.parametrize(
+ "transport_class",
+ [
+ transports.ConnectionServiceGrpcTransport,
+ transports.ConnectionServiceGrpcAsyncIOTransport,
+ ],
+)
+def test_connection_service_grpc_transport_client_cert_source_for_mtls(transport_class):
+ cred = ga_credentials.AnonymousCredentials()
+
+ # Check ssl_channel_credentials is used if provided.
+ with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
+ mock_ssl_channel_creds = mock.Mock()
+ transport_class(
+ host="squid.clam.whelk",
+ credentials=cred,
+ ssl_channel_credentials=mock_ssl_channel_creds,
+ )
+ mock_create_channel.assert_called_once_with(
+ "squid.clam.whelk:443",
+ credentials=cred,
+ credentials_file=None,
+ scopes=(
+ "https://www.googleapis.com/auth/bigquery",
+ "https://www.googleapis.com/auth/cloud-platform",
+ ),
+ ssl_credentials=mock_ssl_channel_creds,
+ quota_project_id=None,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+
+ # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
+ # is used.
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+ with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+ transport_class(
+ credentials=cred,
+ client_cert_source_for_mtls=client_cert_source_callback,
+ )
+ expected_cert, expected_key = client_cert_source_callback()
+ mock_ssl_cred.assert_called_once_with(
+ certificate_chain=expected_cert, private_key=expected_key
+ )
+
+
def test_connection_service_host_no_port():
client = ConnectionServiceClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
client_options=client_options.ClientOptions(
api_endpoint="bigqueryconnection.googleapis.com"
),
@@ -2617,7 +2820,7 @@ def test_connection_service_host_no_port():
def test_connection_service_host_with_port():
client = ConnectionServiceClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
client_options=client_options.ClientOptions(
api_endpoint="bigqueryconnection.googleapis.com:8000"
),
@@ -2626,7 +2829,7 @@ def test_connection_service_host_with_port():
def test_connection_service_grpc_transport_channel():
- channel = grpc.insecure_channel("http://localhost/")
+ channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
# Check that channel is used if provided.
transport = transports.ConnectionServiceGrpcTransport(
@@ -2638,7 +2841,7 @@ def test_connection_service_grpc_transport_channel():
def test_connection_service_grpc_asyncio_transport_channel():
- channel = aio.insecure_channel("http://localhost/")
+ channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
# Check that channel is used if provided.
transport = transports.ConnectionServiceGrpcAsyncIOTransport(
@@ -2649,6 +2852,8 @@ def test_connection_service_grpc_asyncio_transport_channel():
assert transport._ssl_channel_credentials == None
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize(
"transport_class",
[
@@ -2663,7 +2868,7 @@ def test_connection_service_transport_channel_mtls_with_client_cert_source(
"grpc.ssl_channel_credentials", autospec=True
) as grpc_ssl_channel_cred:
with mock.patch.object(
- transport_class, "create_channel", autospec=True
+ transport_class, "create_channel"
) as grpc_create_channel:
mock_ssl_cred = mock.Mock()
grpc_ssl_channel_cred.return_value = mock_ssl_cred
@@ -2671,9 +2876,9 @@ def test_connection_service_transport_channel_mtls_with_client_cert_source(
mock_grpc_channel = mock.Mock()
grpc_create_channel.return_value = mock_grpc_channel
- cred = credentials.AnonymousCredentials()
+ cred = ga_credentials.AnonymousCredentials()
with pytest.warns(DeprecationWarning):
- with mock.patch.object(auth, "default") as adc:
+ with mock.patch.object(google.auth, "default") as adc:
adc.return_value = (cred, None)
transport = transport_class(
host="squid.clam.whelk",
@@ -2704,6 +2909,8 @@ def test_connection_service_transport_channel_mtls_with_client_cert_source(
assert transport._ssl_channel_credentials == mock_ssl_cred
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize(
"transport_class",
[
@@ -2719,7 +2926,7 @@ def test_connection_service_transport_channel_mtls_with_adc(transport_class):
ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
):
with mock.patch.object(
- transport_class, "create_channel", autospec=True
+ transport_class, "create_channel"
) as grpc_create_channel:
mock_grpc_channel = mock.Mock()
grpc_create_channel.return_value = mock_grpc_channel
@@ -2755,7 +2962,6 @@ def test_connection_path():
project = "squid"
location = "clam"
connection = "whelk"
-
expected = "projects/{project}/locations/{location}/connections/{connection}".format(
project=project, location=location, connection=connection,
)
@@ -2778,7 +2984,6 @@ def test_parse_connection_path():
def test_common_billing_account_path():
billing_account = "cuttlefish"
-
expected = "billingAccounts/{billing_account}".format(
billing_account=billing_account,
)
@@ -2799,7 +3004,6 @@ def test_parse_common_billing_account_path():
def test_common_folder_path():
folder = "winkle"
-
expected = "folders/{folder}".format(folder=folder,)
actual = ConnectionServiceClient.common_folder_path(folder)
assert expected == actual
@@ -2818,7 +3022,6 @@ def test_parse_common_folder_path():
def test_common_organization_path():
organization = "scallop"
-
expected = "organizations/{organization}".format(organization=organization,)
actual = ConnectionServiceClient.common_organization_path(organization)
assert expected == actual
@@ -2837,7 +3040,6 @@ def test_parse_common_organization_path():
def test_common_project_path():
project = "squid"
-
expected = "projects/{project}".format(project=project,)
actual = ConnectionServiceClient.common_project_path(project)
assert expected == actual
@@ -2857,7 +3059,6 @@ def test_parse_common_project_path():
def test_common_location_path():
project = "whelk"
location = "octopus"
-
expected = "projects/{project}/locations/{location}".format(
project=project, location=location,
)
@@ -2884,7 +3085,7 @@ def test_client_withDEFAULT_CLIENT_INFO():
transports.ConnectionServiceTransport, "_prep_wrapped_messages"
) as prep:
client = ConnectionServiceClient(
- credentials=credentials.AnonymousCredentials(), client_info=client_info,
+ credentials=ga_credentials.AnonymousCredentials(), client_info=client_info,
)
prep.assert_called_once_with(client_info)
@@ -2893,6 +3094,6 @@ def test_client_withDEFAULT_CLIENT_INFO():
) as prep:
transport_class = ConnectionServiceClient.get_transport_class()
transport = transport_class(
- credentials=credentials.AnonymousCredentials(), client_info=client_info,
+ credentials=ga_credentials.AnonymousCredentials(), client_info=client_info,
)
prep.assert_called_once_with(client_info)