diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index d08d984..2857de2 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -5,7 +5,7 @@
# https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax
# The @googleapis/yoshi-python is the default owner for changes in this repo
-* @googleapis/yoshi-python
+* @googleapis/yoshi-python @googleapis/cicd
-/samples/ @googleapis/python-samples-owners
\ No newline at end of file
+/samples/ @googleapis/python-samples-owners
diff --git a/.github/header-checker-lint.yml b/.github/header-checker-lint.yml
new file mode 100644
index 0000000..fc281c0
--- /dev/null
+++ b/.github/header-checker-lint.yml
@@ -0,0 +1,15 @@
+{"allowedCopyrightHolders": ["Google LLC"],
+ "allowedLicenses": ["Apache-2.0", "MIT", "BSD-3"],
+ "ignoreFiles": ["**/requirements.txt", "**/requirements-test.txt"],
+ "sourceFileExtensions": [
+ "ts",
+ "js",
+ "java",
+ "sh",
+ "Dockerfile",
+ "yaml",
+ "py",
+ "html",
+ "txt"
+ ]
+}
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
index b9daa52..b4243ce 100644
--- a/.gitignore
+++ b/.gitignore
@@ -50,8 +50,10 @@ docs.metadata
# Virtual environment
env/
+
+# Test logs
coverage.xml
-sponge_log.xml
+*sponge_log.xml
# System test environment variables.
system_tests/local_test_setup
diff --git a/.kokoro/build.sh b/.kokoro/build.sh
index 729de9c..cd54992 100755
--- a/.kokoro/build.sh
+++ b/.kokoro/build.sh
@@ -40,6 +40,16 @@ python3 -m pip uninstall --yes --quiet nox-automation
python3 -m pip install --upgrade --quiet nox
python3 -m nox --version
+# If this is a continuous build, send the test log to the FlakyBot.
+# See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot.
+if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]]; then
+ cleanup() {
+ chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot
+ $KOKORO_GFILE_DIR/linux_amd64/flakybot
+ }
+ trap cleanup EXIT HUP
+fi
+
# If NOX_SESSION is set, it only runs the specified session,
# otherwise run all the sessions.
if [[ -n "${NOX_SESSION:-}" ]]; then
diff --git a/.kokoro/samples/python3.6/periodic-head.cfg b/.kokoro/samples/python3.6/periodic-head.cfg
new file mode 100644
index 0000000..f9cfcd3
--- /dev/null
+++ b/.kokoro/samples/python3.6/periodic-head.cfg
@@ -0,0 +1,11 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-pubsub/.kokoro/test-samples-against-head.sh"
+}
diff --git a/.kokoro/samples/python3.7/periodic-head.cfg b/.kokoro/samples/python3.7/periodic-head.cfg
new file mode 100644
index 0000000..f9cfcd3
--- /dev/null
+++ b/.kokoro/samples/python3.7/periodic-head.cfg
@@ -0,0 +1,11 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-pubsub/.kokoro/test-samples-against-head.sh"
+}
diff --git a/.kokoro/samples/python3.8/periodic-head.cfg b/.kokoro/samples/python3.8/periodic-head.cfg
new file mode 100644
index 0000000..f9cfcd3
--- /dev/null
+++ b/.kokoro/samples/python3.8/periodic-head.cfg
@@ -0,0 +1,11 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-pubsub/.kokoro/test-samples-against-head.sh"
+}
diff --git a/.kokoro/test-samples-against-head.sh b/.kokoro/test-samples-against-head.sh
new file mode 100755
index 0000000..c581fea
--- /dev/null
+++ b/.kokoro/test-samples-against-head.sh
@@ -0,0 +1,28 @@
+#!/bin/bash
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# A customized test runner for samples.
+#
+# For periodic builds, you can specify this file for testing against head.
+
+# `-e` enables the script to automatically fail when a command fails
+# `-o pipefail` sets the exit code to that of the rightmost command to exit with a non-zero status
+set -eo pipefail
+# Enables `**` to include files nested inside sub-folders
+shopt -s globstar
+
+cd github/python-binary-authorization
+
+exec .kokoro/test-samples-impl.sh
diff --git a/.kokoro/test-samples-impl.sh b/.kokoro/test-samples-impl.sh
new file mode 100755
index 0000000..cf5de74
--- /dev/null
+++ b/.kokoro/test-samples-impl.sh
@@ -0,0 +1,102 @@
+#!/bin/bash
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# `-e` enables the script to automatically fail when a command fails
+# `-o pipefail` sets the exit code to that of the rightmost command to exit with a non-zero status
+set -eo pipefail
+# Enables `**` to include files nested inside sub-folders
+shopt -s globstar
+
+# Exit early if samples directory doesn't exist
+if [ ! -d "./samples" ]; then
+ echo "No tests run. `./samples` not found"
+ exit 0
+fi
+
+# Disable buffering, so that the logs stream through.
+export PYTHONUNBUFFERED=1
+
+# Debug: show build environment
+env | grep KOKORO
+
+# Install nox
+python3.6 -m pip install --upgrade --quiet nox
+
+# Use secrets accessor service account to get secrets
+if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then
+ gcloud auth activate-service-account \
+ --key-file="${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" \
+ --project="cloud-devrel-kokoro-resources"
+fi
+
+# This script will create 3 files:
+# - testing/test-env.sh
+# - testing/service-account.json
+# - testing/client-secrets.json
+./scripts/decrypt-secrets.sh
+
+source ./testing/test-env.sh
+export GOOGLE_APPLICATION_CREDENTIALS=$(pwd)/testing/service-account.json
+
+# For cloud-run session, we activate the service account for gcloud sdk.
+gcloud auth activate-service-account \
+ --key-file "${GOOGLE_APPLICATION_CREDENTIALS}"
+
+export GOOGLE_CLIENT_SECRETS=$(pwd)/testing/client-secrets.json
+
+echo -e "\n******************** TESTING PROJECTS ********************"
+
+# Switch to 'fail at end' to allow all tests to complete before exiting.
+set +e
+# Use RTN to return a non-zero value if the test fails.
+RTN=0
+ROOT=$(pwd)
+# Find all requirements.txt in the samples directory (may break on whitespace).
+for file in samples/**/requirements.txt; do
+ cd "$ROOT"
+ # Navigate to the project folder.
+ file=$(dirname "$file")
+ cd "$file"
+
+ echo "------------------------------------------------------------"
+ echo "- testing $file"
+ echo "------------------------------------------------------------"
+
+ # Use nox to execute the tests for the project.
+ python3.6 -m nox -s "$RUN_TESTS_SESSION"
+ EXIT=$?
+
+ # If this is a periodic build, send the test log to the FlakyBot.
+ # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot.
+ if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
+ chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot
+ $KOKORO_GFILE_DIR/linux_amd64/flakybot
+ fi
+
+ if [[ $EXIT -ne 0 ]]; then
+ RTN=1
+ echo -e "\n Testing failed: Nox returned a non-zero exit code. \n"
+ else
+ echo -e "\n Testing completed.\n"
+ fi
+
+done
+cd "$ROOT"
+
+# Workaround for Kokoro permissions issue: delete secrets
+rm testing/{test-env.sh,client-secrets.json,service-account.json}
+
+exit "$RTN"
diff --git a/.kokoro/test-samples.sh b/.kokoro/test-samples.sh
index 763fd31..fa4baf8 100755
--- a/.kokoro/test-samples.sh
+++ b/.kokoro/test-samples.sh
@@ -13,6 +13,10 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+# The default test runner for samples.
+#
+# For periodic builds, we rewind the repo to the latest release and
+# run test-samples-impl.sh.
# `-e` enables the script to automatically fail when a command fails
# `-o pipefail` sets the exit code to that of the rightmost command to exit with a non-zero status
@@ -24,87 +28,19 @@ cd github/python-binary-authorization
# Run periodic samples tests at latest release
if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
+ # Preserve the test runner implementation.
+ cp .kokoro/test-samples-impl.sh "${TMPDIR}/test-samples-impl.sh"
+ echo "--- IMPORTANT IMPORTANT IMPORTANT ---"
+ echo "Now we rewind the repo back to the latest release..."
LATEST_RELEASE=$(git describe --abbrev=0 --tags)
git checkout $LATEST_RELEASE
-fi
-
-# Exit early if samples directory doesn't exist
-if [ ! -d "./samples" ]; then
- echo "No tests run. `./samples` not found"
- exit 0
-fi
-
-# Disable buffering, so that the logs stream through.
-export PYTHONUNBUFFERED=1
-
-# Debug: show build environment
-env | grep KOKORO
-
-# Install nox
-python3.6 -m pip install --upgrade --quiet nox
-
-# Use secrets acessor service account to get secrets
-if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then
- gcloud auth activate-service-account \
- --key-file="${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" \
- --project="cloud-devrel-kokoro-resources"
-fi
-
-# This script will create 3 files:
-# - testing/test-env.sh
-# - testing/service-account.json
-# - testing/client-secrets.json
-./scripts/decrypt-secrets.sh
-
-source ./testing/test-env.sh
-export GOOGLE_APPLICATION_CREDENTIALS=$(pwd)/testing/service-account.json
-
-# For cloud-run session, we activate the service account for gcloud sdk.
-gcloud auth activate-service-account \
- --key-file "${GOOGLE_APPLICATION_CREDENTIALS}"
-
-export GOOGLE_CLIENT_SECRETS=$(pwd)/testing/client-secrets.json
-
-echo -e "\n******************** TESTING PROJECTS ********************"
-
-# Switch to 'fail at end' to allow all tests to complete before exiting.
-set +e
-# Use RTN to return a non-zero value if the test fails.
-RTN=0
-ROOT=$(pwd)
-# Find all requirements.txt in the samples directory (may break on whitespace).
-for file in samples/**/requirements.txt; do
- cd "$ROOT"
- # Navigate to the project folder.
- file=$(dirname "$file")
- cd "$file"
-
- echo "------------------------------------------------------------"
- echo "- testing $file"
- echo "------------------------------------------------------------"
-
- # Use nox to execute the tests for the project.
- python3.6 -m nox -s "$RUN_TESTS_SESSION"
- EXIT=$?
-
- # If this is a periodic build, send the test log to the Build Cop Bot.
- # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/buildcop.
- if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
- chmod +x $KOKORO_GFILE_DIR/linux_amd64/buildcop
- $KOKORO_GFILE_DIR/linux_amd64/buildcop
+ echo "The current head is: "
+ echo $(git rev-parse --verify HEAD)
+ echo "--- IMPORTANT IMPORTANT IMPORTANT ---"
+ # Copy the test runner implementation back if this release doesn't include it.
+ if [ ! -f .kokoro/test-samples-impl.sh ]; then
+ cp "${TMPDIR}/test-samples-impl.sh" .kokoro/test-samples-impl.sh
fi
+fi
- if [[ $EXIT -ne 0 ]]; then
- RTN=1
- echo -e "\n Testing failed: Nox returned a non-zero exit code. \n"
- else
- echo -e "\n Testing completed.\n"
- fi
-
-done
-cd "$ROOT"
-
-# Workaround for Kokoro permissions issue: delete secrets
-rm testing/{test-env.sh,client-secrets.json,service-account.json}
-
-exit "$RTN"
+exec .kokoro/test-samples-impl.sh
diff --git a/.kokoro/trampoline_v2.sh b/.kokoro/trampoline_v2.sh
index 719bcd5..4af6cdc 100755
--- a/.kokoro/trampoline_v2.sh
+++ b/.kokoro/trampoline_v2.sh
@@ -159,7 +159,7 @@ if [[ -n "${KOKORO_BUILD_ID:-}" ]]; then
"KOKORO_GITHUB_COMMIT"
"KOKORO_GITHUB_PULL_REQUEST_NUMBER"
"KOKORO_GITHUB_PULL_REQUEST_COMMIT"
- # For Build Cop Bot
+ # For FlakyBot
"KOKORO_GITHUB_COMMIT_URL"
"KOKORO_GITHUB_PULL_REQUEST_URL"
)
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index a9024b1..32302e4 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -12,6 +12,6 @@ repos:
hooks:
- id: black
- repo: https://gitlab.com/pycqa/flake8
- rev: 3.8.4
+ rev: 3.9.0
hooks:
- id: flake8
diff --git a/.repo-metadata.json b/.repo-metadata.json
index af5f987..9f2ea49 100644
--- a/.repo-metadata.json
+++ b/.repo-metadata.json
@@ -8,5 +8,6 @@
"language": "python",
"repo": "googleapis/python-binary-authorization",
"distribution_name": "google-cloud-binary-authorization",
- "api_id": "binaryauthorization.googleapis.com"
- }
\ No newline at end of file
+ "api_id": "binaryauthorization.googleapis.com",
+ "codeowner_team": "@googleapis/cicd"
+ }
diff --git a/.trampolinerc b/.trampolinerc
index c7d663a..383b6ec 100644
--- a/.trampolinerc
+++ b/.trampolinerc
@@ -18,7 +18,6 @@
required_envvars+=(
"STAGING_BUCKET"
"V2_STAGING_BUCKET"
- "NOX_SESSION"
)
# Add env vars which are passed down into the container here.
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 89a4e55..fae5198 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,17 @@
# Changelog
+### [0.1.1](https://www.github.com/googleapis/python-binary-authorization/compare/v0.1.0...v0.1.1) (2021-04-01)
+
+
+### Bug Fixes
+
+* use correct retry deadline ([#7](https://www.github.com/googleapis/python-binary-authorization/issues/7)) ([3f9bfc2](https://www.github.com/googleapis/python-binary-authorization/commit/3f9bfc2b1c5b6d520716b194daf175e1030135b0))
+
+
+### Documentation
+
+* update python contributing guide ([#9](https://www.github.com/googleapis/python-binary-authorization/issues/9)) ([b6e095f](https://www.github.com/googleapis/python-binary-authorization/commit/b6e095ff6a1f7422e9f1ce9132d32871f800aab7))
+
## 0.1.0 (2021-01-08)
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index a3d05c9..5aadca4 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -70,9 +70,14 @@ We use `nox `__ to instrument our tests.
- To test your changes, run unit tests with ``nox``::
$ nox -s unit-2.7
- $ nox -s unit-3.7
+ $ nox -s unit-3.8
$ ...
+- Args to pytest can be passed through the nox command separated by a `--`. For
+ example, to run a single test::
+
+ $ nox -s unit-3.8 -- -k
+
.. note::
The unit tests and system tests are described in the
@@ -93,8 +98,12 @@ On Debian/Ubuntu::
************
Coding Style
************
+- We use the automatic code formatter ``black``. You can run it using
+ the nox session ``blacken``. This will eliminate many lint errors. Run via::
+
+ $ nox -s blacken
-- PEP8 compliance, with exceptions defined in the linter configuration.
+- PEP8 compliance is required, with exceptions defined in the linter configuration.
If you have ``nox`` installed, you can test that you have not introduced
any non-compliant code via::
@@ -133,13 +142,18 @@ Running System Tests
- To run system tests, you can execute::
- $ nox -s system-3.7
+ # Run all system tests
+ $ nox -s system-3.8
$ nox -s system-2.7
+ # Run a single system test
+ $ nox -s system-3.8 -- -k
+
+
.. note::
System tests are only configured to run under Python 2.7 and
- Python 3.7. For expediency, we do not run them in older versions
+ Python 3.8. For expediency, we do not run them in older versions
of Python 3.
This alone will not run the tests. You'll need to change some local
diff --git a/MANIFEST.in b/MANIFEST.in
index e9e29d1..e783f4c 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -16,10 +16,10 @@
# Generated by synthtool. DO NOT EDIT!
include README.rst LICENSE
-recursive-include google *.json *.proto
+recursive-include google *.json *.proto py.typed
recursive-include tests *
global-exclude *.py[co]
global-exclude __pycache__
# Exclude scripts for samples readmegen
-prune scripts/readme-gen
\ No newline at end of file
+prune scripts/readme-gen
diff --git a/google/cloud/binaryauthorization_v1beta1/services/binauthz_management_service_v1_beta1/async_client.py b/google/cloud/binaryauthorization_v1beta1/services/binauthz_management_service_v1_beta1/async_client.py
index 6caa94e..8876be6 100644
--- a/google/cloud/binaryauthorization_v1beta1/services/binauthz_management_service_v1_beta1/async_client.py
+++ b/google/cloud/binaryauthorization_v1beta1/services/binauthz_management_service_v1_beta1/async_client.py
@@ -104,12 +104,36 @@ class BinauthzManagementServiceV1Beta1AsyncClient:
BinauthzManagementServiceV1Beta1Client.parse_common_location_path
)
- from_service_account_info = (
- BinauthzManagementServiceV1Beta1Client.from_service_account_info
- )
- from_service_account_file = (
- BinauthzManagementServiceV1Beta1Client.from_service_account_file
- )
+ @classmethod
+ def from_service_account_info(cls, info: dict, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials info.
+
+ Args:
+ info (dict): The service account private key info.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ BinauthzManagementServiceV1Beta1AsyncClient: The constructed client.
+ """
+ return BinauthzManagementServiceV1Beta1Client.from_service_account_info.__func__(BinauthzManagementServiceV1Beta1AsyncClient, info, *args, **kwargs) # type: ignore
+
+ @classmethod
+ def from_service_account_file(cls, filename: str, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ file.
+
+ Args:
+ filename (str): The path to the service account private key json
+ file.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ BinauthzManagementServiceV1Beta1AsyncClient: The constructed client.
+ """
+ return BinauthzManagementServiceV1Beta1Client.from_service_account_file.__func__(BinauthzManagementServiceV1Beta1AsyncClient, filename, *args, **kwargs) # type: ignore
+
from_service_account_json = from_service_account_file
@property
@@ -220,9 +244,8 @@ async def get_policy(
Returns:
google.cloud.binaryauthorization_v1beta1.types.Policy:
- A
- [policy][google.cloud.binaryauthorization.v1beta1.Policy]
- for container image binary authorization.
+ A [policy][google.cloud.binaryauthorization.v1beta1.Policy] for container
+ image binary authorization.
"""
# Create or coerce a protobuf request object.
@@ -254,6 +277,7 @@ async def get_policy(
predicate=retries.if_exception_type(
exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
),
+ deadline=600.0,
),
default_timeout=600.0,
client_info=DEFAULT_CLIENT_INFO,
@@ -313,9 +337,8 @@ async def update_policy(
Returns:
google.cloud.binaryauthorization_v1beta1.types.Policy:
- A
- [policy][google.cloud.binaryauthorization.v1beta1.Policy]
- for container image binary authorization.
+ A [policy][google.cloud.binaryauthorization.v1beta1.Policy] for container
+ image binary authorization.
"""
# Create or coerce a protobuf request object.
@@ -347,6 +370,7 @@ async def update_policy(
predicate=retries.if_exception_type(
exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
),
+ deadline=600.0,
),
default_timeout=600.0,
client_info=DEFAULT_CLIENT_INFO,
@@ -426,9 +450,9 @@ async def create_attestor(
Returns:
google.cloud.binaryauthorization_v1beta1.types.Attestor:
- An [attestor][google.cloud.binaryauthorization.v1beta1.Attestor] that attests to container image
- artifacts. An existing attestor cannot be modified
- except where indicated.
+ An [attestor][google.cloud.binaryauthorization.v1beta1.Attestor] that attests
+ to container image artifacts. An existing attestor
+ cannot be modified except where indicated.
"""
# Create or coerce a protobuf request object.
@@ -509,9 +533,9 @@ async def get_attestor(
Returns:
google.cloud.binaryauthorization_v1beta1.types.Attestor:
- An [attestor][google.cloud.binaryauthorization.v1beta1.Attestor] that attests to container image
- artifacts. An existing attestor cannot be modified
- except where indicated.
+ An [attestor][google.cloud.binaryauthorization.v1beta1.Attestor] that attests
+ to container image artifacts. An existing attestor
+ cannot be modified except where indicated.
"""
# Create or coerce a protobuf request object.
@@ -543,6 +567,7 @@ async def get_attestor(
predicate=retries.if_exception_type(
exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
),
+ deadline=600.0,
),
default_timeout=600.0,
client_info=DEFAULT_CLIENT_INFO,
@@ -599,9 +624,9 @@ async def update_attestor(
Returns:
google.cloud.binaryauthorization_v1beta1.types.Attestor:
- An [attestor][google.cloud.binaryauthorization.v1beta1.Attestor] that attests to container image
- artifacts. An existing attestor cannot be modified
- except where indicated.
+ An [attestor][google.cloud.binaryauthorization.v1beta1.Attestor] that attests
+ to container image artifacts. An existing attestor
+ cannot be modified except where indicated.
"""
# Create or coerce a protobuf request object.
@@ -633,6 +658,7 @@ async def update_attestor(
predicate=retries.if_exception_type(
exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
),
+ deadline=600.0,
),
default_timeout=600.0,
client_info=DEFAULT_CLIENT_INFO,
@@ -723,6 +749,7 @@ async def list_attestors(
predicate=retries.if_exception_type(
exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
),
+ deadline=600.0,
),
default_timeout=600.0,
client_info=DEFAULT_CLIENT_INFO,
@@ -809,6 +836,7 @@ async def delete_attestor(
predicate=retries.if_exception_type(
exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
),
+ deadline=600.0,
),
default_timeout=600.0,
client_info=DEFAULT_CLIENT_INFO,
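
A brief, hedged usage note on the async_client.py hunk above: the service-account constructors are now real classmethods on the async client, so they return the async client type. The sketch below is illustrative only; "key.json" is a hypothetical placeholder, not a file shipped with this change.

    # Hedged sketch: constructing the async client from service account credentials.
    from google.cloud.binaryauthorization_v1beta1.services.binauthz_management_service_v1_beta1 import (
        BinauthzManagementServiceV1Beta1AsyncClient,
    )

    # From a hypothetical key file on disk; returns the async client type.
    client = BinauthzManagementServiceV1Beta1AsyncClient.from_service_account_file("key.json")

    # Or from already-loaded key material (the dict-based entry point added above).
    # client = BinauthzManagementServiceV1Beta1AsyncClient.from_service_account_info(info)
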
diff --git a/google/cloud/binaryauthorization_v1beta1/services/binauthz_management_service_v1_beta1/client.py b/google/cloud/binaryauthorization_v1beta1/services/binauthz_management_service_v1_beta1/client.py
index c724f1b..aac65a5 100644
--- a/google/cloud/binaryauthorization_v1beta1/services/binauthz_management_service_v1_beta1/client.py
+++ b/google/cloud/binaryauthorization_v1beta1/services/binauthz_management_service_v1_beta1/client.py
@@ -315,21 +315,17 @@ def __init__(
util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))
)
- ssl_credentials = None
+ client_cert_source_func = None
is_mtls = False
if use_client_cert:
if client_options.client_cert_source:
- import grpc # type: ignore
-
- cert, key = client_options.client_cert_source()
- ssl_credentials = grpc.ssl_channel_credentials(
- certificate_chain=cert, private_key=key
- )
is_mtls = True
+ client_cert_source_func = client_options.client_cert_source
else:
- creds = SslCredentials()
- is_mtls = creds.is_mtls
- ssl_credentials = creds.ssl_credentials if is_mtls else None
+ is_mtls = mtls.has_default_client_cert_source()
+ client_cert_source_func = (
+ mtls.default_client_cert_source() if is_mtls else None
+ )
# Figure out which api endpoint to use.
if client_options.api_endpoint is not None:
@@ -372,7 +368,7 @@ def __init__(
credentials_file=client_options.credentials_file,
host=api_endpoint,
scopes=client_options.scopes,
- ssl_channel_credentials=ssl_credentials,
+ client_cert_source_for_mtls=client_cert_source_func,
quota_project_id=client_options.quota_project_id,
client_info=client_info,
)
@@ -421,9 +417,8 @@ def get_policy(
Returns:
google.cloud.binaryauthorization_v1beta1.types.Policy:
- A
- [policy][google.cloud.binaryauthorization.v1beta1.Policy]
- for container image binary authorization.
+ A [policy][google.cloud.binaryauthorization.v1beta1.Policy] for container
+ image binary authorization.
"""
# Create or coerce a protobuf request object.
@@ -507,9 +502,8 @@ def update_policy(
Returns:
google.cloud.binaryauthorization_v1beta1.types.Policy:
- A
- [policy][google.cloud.binaryauthorization.v1beta1.Policy]
- for container image binary authorization.
+ A [policy][google.cloud.binaryauthorization.v1beta1.Policy] for container
+ image binary authorization.
"""
# Create or coerce a protobuf request object.
@@ -613,9 +607,9 @@ def create_attestor(
Returns:
google.cloud.binaryauthorization_v1beta1.types.Attestor:
- An [attestor][google.cloud.binaryauthorization.v1beta1.Attestor] that attests to container image
- artifacts. An existing attestor cannot be modified
- except where indicated.
+ An [attestor][google.cloud.binaryauthorization.v1beta1.Attestor] that attests
+ to container image artifacts. An existing attestor
+ cannot be modified except where indicated.
"""
# Create or coerce a protobuf request object.
@@ -697,9 +691,9 @@ def get_attestor(
Returns:
google.cloud.binaryauthorization_v1beta1.types.Attestor:
- An [attestor][google.cloud.binaryauthorization.v1beta1.Attestor] that attests to container image
- artifacts. An existing attestor cannot be modified
- except where indicated.
+ An [attestor][google.cloud.binaryauthorization.v1beta1.Attestor] that attests
+ to container image artifacts. An existing attestor
+ cannot be modified except where indicated.
"""
# Create or coerce a protobuf request object.
@@ -780,9 +774,9 @@ def update_attestor(
Returns:
google.cloud.binaryauthorization_v1beta1.types.Attestor:
- An [attestor][google.cloud.binaryauthorization.v1beta1.Attestor] that attests to container image
- artifacts. An existing attestor cannot be modified
- except where indicated.
+ An [attestor][google.cloud.binaryauthorization.v1beta1.Attestor] that attests
+ to container image artifacts. An existing attestor
+ cannot be modified except where indicated.
"""
# Create or coerce a protobuf request object.
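
The client.py hunk above swaps direct SSL-credential construction for a client certificate callback, gated by GOOGLE_API_USE_CLIENT_CERTIFICATE. A hedged sketch of how a caller could supply that callback through client options; the callback name and certificate paths are hypothetical.

    # Hedged sketch: opting into mTLS with a client certificate callback.
    from google.api_core.client_options import ClientOptions
    from google.cloud.binaryauthorization_v1beta1.services.binauthz_management_service_v1_beta1 import (
        BinauthzManagementServiceV1Beta1Client,
    )

    def my_cert_source():
        # Hypothetical PEM files; must return (certificate_bytes, private_key_bytes).
        with open("client_cert.pem", "rb") as cert, open("client_key.pem", "rb") as key:
            return cert.read(), key.read()

    # The callback is only consulted when GOOGLE_API_USE_CLIENT_CERTIFICATE=true.
    options = ClientOptions(client_cert_source=my_cert_source)
    client = BinauthzManagementServiceV1Beta1Client(client_options=options)
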
diff --git a/google/cloud/binaryauthorization_v1beta1/services/binauthz_management_service_v1_beta1/pagers.py b/google/cloud/binaryauthorization_v1beta1/services/binauthz_management_service_v1_beta1/pagers.py
index ca31f7d..42c4659 100644
--- a/google/cloud/binaryauthorization_v1beta1/services/binauthz_management_service_v1_beta1/pagers.py
+++ b/google/cloud/binaryauthorization_v1beta1/services/binauthz_management_service_v1_beta1/pagers.py
@@ -15,7 +15,16 @@
# limitations under the License.
#
-from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple
+from typing import (
+ Any,
+ AsyncIterable,
+ Awaitable,
+ Callable,
+ Iterable,
+ Sequence,
+ Tuple,
+ Optional,
+)
from google.cloud.binaryauthorization_v1beta1.types import resources
from google.cloud.binaryauthorization_v1beta1.types import service
diff --git a/google/cloud/binaryauthorization_v1beta1/services/binauthz_management_service_v1_beta1/transports/base.py b/google/cloud/binaryauthorization_v1beta1/services/binauthz_management_service_v1_beta1/transports/base.py
index 9c18cfc..9bc0a22 100644
--- a/google/cloud/binaryauthorization_v1beta1/services/binauthz_management_service_v1_beta1/transports/base.py
+++ b/google/cloud/binaryauthorization_v1beta1/services/binauthz_management_service_v1_beta1/transports/base.py
@@ -71,10 +71,10 @@ def __init__(
scope (Optional[Sequence[str]]): A list of scopes.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
- client_info (google.api_core.gapic_v1.client_info.ClientInfo):
- The client info used to send a user-agent string along with
- API requests. If ``None``, then default info will be used.
- Generally, you only need to set this if you're developing
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
your own client library.
"""
# Save the hostname. Default to port 443 (HTTPS) if none is specified.
@@ -82,6 +82,9 @@ def __init__(
host += ":443"
self._host = host
+ # Save the scopes.
+ self._scopes = scopes or self.AUTH_SCOPES
+
# If no credentials are provided, then determine the appropriate
# defaults.
if credentials and credentials_file:
@@ -91,20 +94,17 @@ def __init__(
if credentials_file is not None:
credentials, _ = auth.load_credentials_from_file(
- credentials_file, scopes=scopes, quota_project_id=quota_project_id
+ credentials_file, scopes=self._scopes, quota_project_id=quota_project_id
)
elif credentials is None:
credentials, _ = auth.default(
- scopes=scopes, quota_project_id=quota_project_id
+ scopes=self._scopes, quota_project_id=quota_project_id
)
# Save the credentials.
self._credentials = credentials
- # Lifted into its own function so it can be stubbed out during tests.
- self._prep_wrapped_messages(client_info)
-
def _prep_wrapped_messages(self, client_info):
# Precompute the wrapped methods.
self._wrapped_methods = {
@@ -117,6 +117,7 @@ def _prep_wrapped_messages(self, client_info):
predicate=retries.if_exception_type(
exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
),
+ deadline=600.0,
),
default_timeout=600.0,
client_info=client_info,
@@ -130,6 +131,7 @@ def _prep_wrapped_messages(self, client_info):
predicate=retries.if_exception_type(
exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
),
+ deadline=600.0,
),
default_timeout=600.0,
client_info=client_info,
@@ -146,6 +148,7 @@ def _prep_wrapped_messages(self, client_info):
predicate=retries.if_exception_type(
exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
),
+ deadline=600.0,
),
default_timeout=600.0,
client_info=client_info,
@@ -159,6 +162,7 @@ def _prep_wrapped_messages(self, client_info):
predicate=retries.if_exception_type(
exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
),
+ deadline=600.0,
),
default_timeout=600.0,
client_info=client_info,
@@ -172,6 +176,7 @@ def _prep_wrapped_messages(self, client_info):
predicate=retries.if_exception_type(
exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
),
+ deadline=600.0,
),
default_timeout=600.0,
client_info=client_info,
@@ -185,6 +190,7 @@ def _prep_wrapped_messages(self, client_info):
predicate=retries.if_exception_type(
exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
),
+ deadline=600.0,
),
default_timeout=600.0,
client_info=client_info,
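
Every wrapped method in this transport now carries deadline=600.0 on its retry, bounding the total time spent retrying DeadlineExceeded and ServiceUnavailable errors. A minimal hedged sketch of that retry object on its own; the backoff numbers are illustrative, not taken from this file.

    # Hedged sketch of the retry policy shape configured above.
    from google.api_core import exceptions
    from google.api_core import retry as retries

    retry_policy = retries.Retry(
        initial=0.1,      # illustrative first backoff, in seconds
        maximum=60.0,     # illustrative backoff ceiling
        multiplier=1.3,   # illustrative backoff growth factor
        predicate=retries.if_exception_type(
            exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
        ),
        deadline=600.0,   # stop retrying once 600 seconds have elapsed overall
    )
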
diff --git a/google/cloud/binaryauthorization_v1beta1/services/binauthz_management_service_v1_beta1/transports/grpc.py b/google/cloud/binaryauthorization_v1beta1/services/binauthz_management_service_v1_beta1/transports/grpc.py
index 5b9cae2..e2a2f5b 100644
--- a/google/cloud/binaryauthorization_v1beta1/services/binauthz_management_service_v1_beta1/transports/grpc.py
+++ b/google/cloud/binaryauthorization_v1beta1/services/binauthz_management_service_v1_beta1/transports/grpc.py
@@ -67,6 +67,7 @@ def __init__(
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
ssl_channel_credentials: grpc.ChannelCredentials = None,
+ client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
@@ -97,6 +98,10 @@ def __init__(
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for grpc channel. It is ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ A callback to provide client certificate bytes and private key bytes,
+ both in PEM format. It is used to configure mutual TLS channel. It is
+ ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
@@ -111,72 +116,60 @@ def __init__(
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
+ self._grpc_channel = None
self._ssl_channel_credentials = ssl_channel_credentials
+ self._stubs: Dict[str, Callable] = {}
+
+ if api_mtls_endpoint:
+ warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+ if client_cert_source:
+ warnings.warn("client_cert_source is deprecated", DeprecationWarning)
if channel:
- # Sanity check: Ensure that channel and credentials are not both
- # provided.
+ # Ignore credentials if a channel was passed.
credentials = False
-
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
self._ssl_channel_credentials = None
- elif api_mtls_endpoint:
- warnings.warn(
- "api_mtls_endpoint and client_cert_source are deprecated",
- DeprecationWarning,
- )
- host = (
- api_mtls_endpoint
- if ":" in api_mtls_endpoint
- else api_mtls_endpoint + ":443"
- )
+ else:
+ if api_mtls_endpoint:
+ host = api_mtls_endpoint
+
+ # Create SSL credentials with client_cert_source or application
+ # default SSL credentials.
+ if client_cert_source:
+ cert, key = client_cert_source()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+ else:
+ self._ssl_channel_credentials = SslCredentials().ssl_credentials
- if credentials is None:
- credentials, _ = auth.default(
- scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
- )
-
- # Create SSL credentials with client_cert_source or application
- # default SSL credentials.
- if client_cert_source:
- cert, key = client_cert_source()
- ssl_credentials = grpc.ssl_channel_credentials(
- certificate_chain=cert, private_key=key
- )
else:
- ssl_credentials = SslCredentials().ssl_credentials
-
- # create a new channel. The provided one is ignored.
- self._grpc_channel = type(self).create_channel(
- host,
- credentials=credentials,
- credentials_file=credentials_file,
- ssl_credentials=ssl_credentials,
- scopes=scopes or self.AUTH_SCOPES,
- quota_project_id=quota_project_id,
- options=[
- ("grpc.max_send_message_length", -1),
- ("grpc.max_receive_message_length", -1),
- ],
- )
- self._ssl_channel_credentials = ssl_credentials
- else:
- host = host if ":" in host else host + ":443"
+ if client_cert_source_for_mtls and not ssl_channel_credentials:
+ cert, key = client_cert_source_for_mtls()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
- if credentials is None:
- credentials, _ = auth.default(
- scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
- )
+ # The base transport sets the host, credentials and scopes
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes,
+ quota_project_id=quota_project_id,
+ client_info=client_info,
+ )
- # create a new channel. The provided one is ignored.
+ if not self._grpc_channel:
self._grpc_channel = type(self).create_channel(
- host,
- credentials=credentials,
+ self._host,
+ credentials=self._credentials,
credentials_file=credentials_file,
- ssl_credentials=ssl_channel_credentials,
- scopes=scopes or self.AUTH_SCOPES,
+ scopes=self._scopes,
+ ssl_credentials=self._ssl_channel_credentials,
quota_project_id=quota_project_id,
options=[
("grpc.max_send_message_length", -1),
@@ -184,17 +177,8 @@ def __init__(
],
)
- self._stubs = {} # type: Dict[str, Callable]
-
- # Run the base constructor.
- super().__init__(
- host=host,
- credentials=credentials,
- credentials_file=credentials_file,
- scopes=scopes or self.AUTH_SCOPES,
- quota_project_id=quota_project_id,
- client_info=client_info,
- )
+ # Wrap messages. This must be done after self._grpc_channel exists
+ self._prep_wrapped_messages(client_info)
@classmethod
def create_channel(
@@ -208,7 +192,7 @@ def create_channel(
) -> grpc.Channel:
"""Create and return a gRPC channel object.
Args:
- address (Optional[str]): The host for the channel to use.
+ host (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
diff --git a/google/cloud/binaryauthorization_v1beta1/services/binauthz_management_service_v1_beta1/transports/grpc_asyncio.py b/google/cloud/binaryauthorization_v1beta1/services/binauthz_management_service_v1_beta1/transports/grpc_asyncio.py
index bfe3486..4b621da 100644
--- a/google/cloud/binaryauthorization_v1beta1/services/binauthz_management_service_v1_beta1/transports/grpc_asyncio.py
+++ b/google/cloud/binaryauthorization_v1beta1/services/binauthz_management_service_v1_beta1/transports/grpc_asyncio.py
@@ -71,7 +71,7 @@ def create_channel(
) -> aio.Channel:
"""Create and return a gRPC AsyncIO channel object.
Args:
- address (Optional[str]): The host for the channel to use.
+ host (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
@@ -111,6 +111,7 @@ def __init__(
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
ssl_channel_credentials: grpc.ChannelCredentials = None,
+ client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
quota_project_id=None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
@@ -142,12 +143,16 @@ def __init__(
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for grpc channel. It is ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ A callback to provide client certificate bytes and private key bytes,
+ both in PEM format. It is used to configure mutual TLS channel. It is
+ ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
- client_info (google.api_core.gapic_v1.client_info.ClientInfo):
- The client info used to send a user-agent string along with
- API requests. If ``None``, then default info will be used.
- Generally, you only need to set this if you're developing
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
your own client library.
Raises:
@@ -156,72 +161,60 @@ def __init__(
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
+ self._grpc_channel = None
self._ssl_channel_credentials = ssl_channel_credentials
+ self._stubs: Dict[str, Callable] = {}
+
+ if api_mtls_endpoint:
+ warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+ if client_cert_source:
+ warnings.warn("client_cert_source is deprecated", DeprecationWarning)
if channel:
- # Sanity check: Ensure that channel and credentials are not both
- # provided.
+ # Ignore credentials if a channel was passed.
credentials = False
-
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
self._ssl_channel_credentials = None
- elif api_mtls_endpoint:
- warnings.warn(
- "api_mtls_endpoint and client_cert_source are deprecated",
- DeprecationWarning,
- )
- host = (
- api_mtls_endpoint
- if ":" in api_mtls_endpoint
- else api_mtls_endpoint + ":443"
- )
+ else:
+ if api_mtls_endpoint:
+ host = api_mtls_endpoint
+
+ # Create SSL credentials with client_cert_source or application
+ # default SSL credentials.
+ if client_cert_source:
+ cert, key = client_cert_source()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+ else:
+ self._ssl_channel_credentials = SslCredentials().ssl_credentials
- if credentials is None:
- credentials, _ = auth.default(
- scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
- )
-
- # Create SSL credentials with client_cert_source or application
- # default SSL credentials.
- if client_cert_source:
- cert, key = client_cert_source()
- ssl_credentials = grpc.ssl_channel_credentials(
- certificate_chain=cert, private_key=key
- )
else:
- ssl_credentials = SslCredentials().ssl_credentials
-
- # create a new channel. The provided one is ignored.
- self._grpc_channel = type(self).create_channel(
- host,
- credentials=credentials,
- credentials_file=credentials_file,
- ssl_credentials=ssl_credentials,
- scopes=scopes or self.AUTH_SCOPES,
- quota_project_id=quota_project_id,
- options=[
- ("grpc.max_send_message_length", -1),
- ("grpc.max_receive_message_length", -1),
- ],
- )
- self._ssl_channel_credentials = ssl_credentials
- else:
- host = host if ":" in host else host + ":443"
+ if client_cert_source_for_mtls and not ssl_channel_credentials:
+ cert, key = client_cert_source_for_mtls()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
- if credentials is None:
- credentials, _ = auth.default(
- scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
- )
+ # The base transport sets the host, credentials and scopes
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes,
+ quota_project_id=quota_project_id,
+ client_info=client_info,
+ )
- # create a new channel. The provided one is ignored.
+ if not self._grpc_channel:
self._grpc_channel = type(self).create_channel(
- host,
- credentials=credentials,
+ self._host,
+ credentials=self._credentials,
credentials_file=credentials_file,
- ssl_credentials=ssl_channel_credentials,
- scopes=scopes or self.AUTH_SCOPES,
+ scopes=self._scopes,
+ ssl_credentials=self._ssl_channel_credentials,
quota_project_id=quota_project_id,
options=[
("grpc.max_send_message_length", -1),
@@ -229,17 +222,8 @@ def __init__(
],
)
- # Run the base constructor.
- super().__init__(
- host=host,
- credentials=credentials,
- credentials_file=credentials_file,
- scopes=scopes or self.AUTH_SCOPES,
- quota_project_id=quota_project_id,
- client_info=client_info,
- )
-
- self._stubs = {}
+ # Wrap messages. This must be done after self._grpc_channel exists
+ self._prep_wrapped_messages(client_info)
@property
def grpc_channel(self) -> aio.Channel:
diff --git a/google/cloud/binaryauthorization_v1beta1/types/__init__.py b/google/cloud/binaryauthorization_v1beta1/types/__init__.py
index 97a676c..b4424d4 100644
--- a/google/cloud/binaryauthorization_v1beta1/types/__init__.py
+++ b/google/cloud/binaryauthorization_v1beta1/types/__init__.py
@@ -16,39 +16,39 @@
#
from .resources import (
- Policy,
- AdmissionWhitelistPattern,
AdmissionRule,
+ AdmissionWhitelistPattern,
Attestor,
- UserOwnedDrydockNote,
- PkixPublicKey,
AttestorPublicKey,
+ PkixPublicKey,
+ Policy,
+ UserOwnedDrydockNote,
)
from .service import (
- GetPolicyRequest,
- UpdatePolicyRequest,
CreateAttestorRequest,
+ DeleteAttestorRequest,
GetAttestorRequest,
- UpdateAttestorRequest,
+ GetPolicyRequest,
ListAttestorsRequest,
ListAttestorsResponse,
- DeleteAttestorRequest,
+ UpdateAttestorRequest,
+ UpdatePolicyRequest,
)
__all__ = (
- "Policy",
- "AdmissionWhitelistPattern",
"AdmissionRule",
+ "AdmissionWhitelistPattern",
"Attestor",
- "UserOwnedDrydockNote",
- "PkixPublicKey",
"AttestorPublicKey",
- "GetPolicyRequest",
- "UpdatePolicyRequest",
+ "PkixPublicKey",
+ "Policy",
+ "UserOwnedDrydockNote",
"CreateAttestorRequest",
+ "DeleteAttestorRequest",
"GetAttestorRequest",
- "UpdateAttestorRequest",
+ "GetPolicyRequest",
"ListAttestorsRequest",
"ListAttestorsResponse",
- "DeleteAttestorRequest",
+ "UpdateAttestorRequest",
+ "UpdatePolicyRequest",
)
diff --git a/google/cloud/binaryauthorization_v1beta1/types/resources.py b/google/cloud/binaryauthorization_v1beta1/types/resources.py
index 2e9e016..7866476 100644
--- a/google/cloud/binaryauthorization_v1beta1/types/resources.py
+++ b/google/cloud/binaryauthorization_v1beta1/types/resources.py
@@ -55,7 +55,7 @@ class Policy(proto.Message):
effect when specified inside a global admission
policy.
admission_whitelist_patterns (Sequence[google.cloud.binaryauthorization_v1beta1.types.AdmissionWhitelistPattern]):
- Optional. Admission policy whitelisting. A
+ Optional. Admission policy allowlisting. A
matching admission request will always be
permitted. This feature is typically used to
exclude Google or third-party infrastructure
@@ -107,14 +107,14 @@ class GlobalPolicyEvaluationMode(proto.Enum):
class AdmissionWhitelistPattern(proto.Message):
- r"""An [admission whitelist
+ r"""An [admission allowlist
pattern][google.cloud.binaryauthorization.v1beta1.AdmissionWhitelistPattern]
exempts images from checks by [admission
rules][google.cloud.binaryauthorization.v1beta1.AdmissionRule].
Attributes:
name_pattern (str):
- An image name pattern to whitelist, in the form
+ An image name pattern to allow, in the form
``registry/path/to/image``. This supports a trailing ``*``
as a wildcard, but this is allowed only in text after the
``registry/`` part.
@@ -132,7 +132,7 @@ class AdmissionRule(proto.Message):
all pod creations will be allowed, or that all pod creations will be
denied.
- Images matching an [admission whitelist
+ Images matching an [admission allowlist
pattern][google.cloud.binaryauthorization.v1beta1.AdmissionWhitelistPattern]
are exempted from admission rules and will never block a pod
creation.
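
The docstring changes above only reword "whitelist" as "allowlist"; the AdmissionWhitelistPattern message and its name_pattern field are unchanged. A hedged sketch of constructing one, with a hypothetical image path showing the trailing * wildcard the docstring describes.

    # Hedged sketch: an allowlist pattern for a hypothetical project's images.
    from google.cloud.binaryauthorization_v1beta1.types import resources

    pattern = resources.AdmissionWhitelistPattern(
        name_pattern="gcr.io/my-project/*",  # trailing * wildcard after the registry/ part
    )
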
diff --git a/noxfile.py b/noxfile.py
index b6b128d..4d37cd3 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -18,6 +18,7 @@
from __future__ import absolute_import
import os
+import pathlib
import shutil
import nox
@@ -28,7 +29,9 @@
DEFAULT_PYTHON_VERSION = "3.8"
SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"]
-UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8"]
+UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"]
+
+CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()
# 'docfx' is excluded since it only needs to run in 'docs-presubmit'
nox.options.sessions = [
@@ -41,6 +44,9 @@
"docs",
]
+# Error if a python version is missing
+nox.options.error_on_missing_interpreters = True
+
@nox.session(python=DEFAULT_PYTHON_VERSION)
def lint(session):
@@ -81,17 +87,21 @@ def lint_setup_py(session):
def default(session):
# Install all test dependencies, then install this package in-place.
- session.install("asyncmock", "pytest-asyncio")
- session.install(
- "mock", "pytest", "pytest-cov",
+ constraints_path = str(
+ CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt"
)
- session.install("-e", ".")
+ session.install("asyncmock", "pytest-asyncio", "-c", constraints_path)
+
+ session.install("mock", "pytest", "pytest-cov", "-c", constraints_path)
+
+ session.install("-e", ".", "-c", constraints_path)
# Run py.test against the unit tests.
session.run(
"py.test",
"--quiet",
+ f"--junitxml=unit_{session.python}_sponge_log.xml",
"--cov=google/cloud",
"--cov=tests/unit",
"--cov-append",
@@ -112,6 +122,9 @@ def unit(session):
@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS)
def system(session):
"""Run the system test suite."""
+ constraints_path = str(
+ CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt"
+ )
system_test_path = os.path.join("tests", "system.py")
system_test_folder_path = os.path.join("tests", "system")
@@ -121,6 +134,9 @@ def system(session):
# Sanity check: Only run tests if the environment variable is set.
if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""):
session.skip("Credentials must be set via environment variable")
+ # Install pyopenssl for mTLS testing.
+ if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true":
+ session.install("pyopenssl")
system_test_exists = os.path.exists(system_test_path)
system_test_folder_exists = os.path.exists(system_test_folder_path)
@@ -133,16 +149,26 @@ def system(session):
# Install all test dependencies, then install this package into the
# virtualenv's dist-packages.
- session.install(
- "mock", "pytest", "google-cloud-testutils",
- )
- session.install("-e", ".")
+ session.install("mock", "pytest", "google-cloud-testutils", "-c", constraints_path)
+ session.install("-e", ".", "-c", constraints_path)
# Run py.test against the system tests.
if system_test_exists:
- session.run("py.test", "--quiet", system_test_path, *session.posargs)
+ session.run(
+ "py.test",
+ "--quiet",
+ f"--junitxml=system_{session.python}_sponge_log.xml",
+ system_test_path,
+ *session.posargs,
+ )
if system_test_folder_exists:
- session.run("py.test", "--quiet", system_test_folder_path, *session.posargs)
+ session.run(
+ "py.test",
+ "--quiet",
+ f"--junitxml=system_{session.python}_sponge_log.xml",
+ system_test_folder_path,
+ *session.posargs,
+ )
@nox.session(python=DEFAULT_PYTHON_VERSION)
@@ -153,7 +179,7 @@ def cover(session):
test runs (not system test runs), and then erases coverage data.
"""
session.install("coverage", "pytest-cov")
- session.run("coverage", "report", "--show-missing", "--fail-under=99")
+ session.run("coverage", "report", "--show-missing", "--fail-under=98")
session.run("coverage", "erase")
@@ -185,9 +211,7 @@ def docfx(session):
"""Build the docfx yaml files for this library."""
session.install("-e", ".")
- # sphinx-docfx-yaml supports up to sphinx version 1.5.5.
- # https://github.com/docascode/sphinx-docfx-yaml/issues/97
- session.install("sphinx==1.5.5", "alabaster", "recommonmark", "sphinx-docfx-yaml")
+ session.install("sphinx", "alabaster", "recommonmark", "gcp-sphinx-docfx-yaml")
shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True)
session.run(
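
The noxfile hunks above thread a per-interpreter constraints file into every install via pip's -c flag. A small hedged sketch of how that path is built, mirroring the CURRENT_DIRECTORY and constraints_path lines in the diff; the "3.9" value stands in for session.python inside a session.

    # Hedged sketch of the constraints-path resolution used in the noxfile.
    import pathlib

    CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()
    python_version = "3.9"  # stand-in for session.python
    constraints_path = str(
        CURRENT_DIRECTORY / "testing" / f"constraints-{python_version}.txt"
    )
    # The sessions then install against the pinned lower bounds, e.g.:
    #   session.install("-e", ".", "-c", constraints_path)
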
diff --git a/renovate.json b/renovate.json
index 4fa9493..f08bc22 100644
--- a/renovate.json
+++ b/renovate.json
@@ -1,5 +1,6 @@
{
"extends": [
"config:base", ":preserveSemverRanges"
- ]
+ ],
+ "ignorePaths": [".pre-commit-config.yaml"]
}
diff --git a/setup.py b/setup.py
index 507c835..467910a 100644
--- a/setup.py
+++ b/setup.py
@@ -20,7 +20,7 @@
import setuptools # type: ignore
-version = "0.1.0"
+version = "0.1.1"
package_root = os.path.abspath(os.path.dirname(__file__))
diff --git a/synth.metadata b/synth.metadata
index 3b83d9f..df6e58a 100644
--- a/synth.metadata
+++ b/synth.metadata
@@ -4,29 +4,29 @@
"git": {
"name": ".",
"remote": "git@github.com:googleapis/python-binary-authorization",
- "sha": "9f6c2c61b7cdf188da47007f88f032b8dc9b2128"
+ "sha": "2bcce18f2fb53917d58537bc366940093569178c"
}
},
{
"git": {
"name": "googleapis",
"remote": "https://github.com/googleapis/googleapis.git",
- "sha": "b241bbc1ad920771707fe03e4cedd4e8c492e143",
- "internalRef": "350670798"
+ "sha": "56fc6d43fed71188d7e18f3ca003544646c4ab35",
+ "internalRef": "366346972"
}
},
{
"git": {
"name": "synthtool",
"remote": "https://github.com/googleapis/synthtool.git",
- "sha": "fb53b6fb373b7c3edf4e55f3e8036bc6d73fa483"
+ "sha": "6d76df2138f8f841e5a5b9ac427f81def520c15f"
}
},
{
"git": {
"name": "synthtool",
"remote": "https://github.com/googleapis/synthtool.git",
- "sha": "fb53b6fb373b7c3edf4e55f3e8036bc6d73fa483"
+ "sha": "6d76df2138f8f841e5a5b9ac427f81def520c15f"
}
}
],
@@ -40,5 +40,101 @@
"generator": "bazel"
}
}
+ ],
+ "generatedFiles": [
+ ".coveragerc",
+ ".flake8",
+ ".github/CONTRIBUTING.md",
+ ".github/ISSUE_TEMPLATE/bug_report.md",
+ ".github/ISSUE_TEMPLATE/feature_request.md",
+ ".github/ISSUE_TEMPLATE/support_request.md",
+ ".github/PULL_REQUEST_TEMPLATE.md",
+ ".github/header-checker-lint.yml",
+ ".github/release-please.yml",
+ ".github/snippet-bot.yml",
+ ".gitignore",
+ ".kokoro/build.sh",
+ ".kokoro/continuous/common.cfg",
+ ".kokoro/continuous/continuous.cfg",
+ ".kokoro/docker/docs/Dockerfile",
+ ".kokoro/docker/docs/fetch_gpg_keys.sh",
+ ".kokoro/docs/common.cfg",
+ ".kokoro/docs/docs-presubmit.cfg",
+ ".kokoro/docs/docs.cfg",
+ ".kokoro/populate-secrets.sh",
+ ".kokoro/presubmit/common.cfg",
+ ".kokoro/presubmit/presubmit.cfg",
+ ".kokoro/publish-docs.sh",
+ ".kokoro/release.sh",
+ ".kokoro/release/common.cfg",
+ ".kokoro/release/release.cfg",
+ ".kokoro/samples/lint/common.cfg",
+ ".kokoro/samples/lint/continuous.cfg",
+ ".kokoro/samples/lint/periodic.cfg",
+ ".kokoro/samples/lint/presubmit.cfg",
+ ".kokoro/samples/python3.6/common.cfg",
+ ".kokoro/samples/python3.6/continuous.cfg",
+ ".kokoro/samples/python3.6/periodic-head.cfg",
+ ".kokoro/samples/python3.6/periodic.cfg",
+ ".kokoro/samples/python3.6/presubmit.cfg",
+ ".kokoro/samples/python3.7/common.cfg",
+ ".kokoro/samples/python3.7/continuous.cfg",
+ ".kokoro/samples/python3.7/periodic-head.cfg",
+ ".kokoro/samples/python3.7/periodic.cfg",
+ ".kokoro/samples/python3.7/presubmit.cfg",
+ ".kokoro/samples/python3.8/common.cfg",
+ ".kokoro/samples/python3.8/continuous.cfg",
+ ".kokoro/samples/python3.8/periodic-head.cfg",
+ ".kokoro/samples/python3.8/periodic.cfg",
+ ".kokoro/samples/python3.8/presubmit.cfg",
+ ".kokoro/test-samples-against-head.sh",
+ ".kokoro/test-samples-impl.sh",
+ ".kokoro/test-samples.sh",
+ ".kokoro/trampoline.sh",
+ ".kokoro/trampoline_v2.sh",
+ ".pre-commit-config.yaml",
+ ".trampolinerc",
+ "CODE_OF_CONDUCT.md",
+ "CONTRIBUTING.rst",
+ "LICENSE",
+ "MANIFEST.in",
+ "docs/_static/custom.css",
+ "docs/_templates/layout.html",
+ "docs/binaryauthorization_v1beta1/binauthz_management_service_v1_beta1.rst",
+ "docs/binaryauthorization_v1beta1/services.rst",
+ "docs/binaryauthorization_v1beta1/types.rst",
+ "docs/conf.py",
+ "docs/multiprocessing.rst",
+ "google/cloud/binaryauthorization/__init__.py",
+ "google/cloud/binaryauthorization/py.typed",
+ "google/cloud/binaryauthorization_v1beta1/__init__.py",
+ "google/cloud/binaryauthorization_v1beta1/py.typed",
+ "google/cloud/binaryauthorization_v1beta1/services/__init__.py",
+ "google/cloud/binaryauthorization_v1beta1/services/binauthz_management_service_v1_beta1/__init__.py",
+ "google/cloud/binaryauthorization_v1beta1/services/binauthz_management_service_v1_beta1/async_client.py",
+ "google/cloud/binaryauthorization_v1beta1/services/binauthz_management_service_v1_beta1/client.py",
+ "google/cloud/binaryauthorization_v1beta1/services/binauthz_management_service_v1_beta1/pagers.py",
+ "google/cloud/binaryauthorization_v1beta1/services/binauthz_management_service_v1_beta1/transports/__init__.py",
+ "google/cloud/binaryauthorization_v1beta1/services/binauthz_management_service_v1_beta1/transports/base.py",
+ "google/cloud/binaryauthorization_v1beta1/services/binauthz_management_service_v1_beta1/transports/grpc.py",
+ "google/cloud/binaryauthorization_v1beta1/services/binauthz_management_service_v1_beta1/transports/grpc_asyncio.py",
+ "google/cloud/binaryauthorization_v1beta1/types/__init__.py",
+ "google/cloud/binaryauthorization_v1beta1/types/resources.py",
+ "google/cloud/binaryauthorization_v1beta1/types/service.py",
+ "mypy.ini",
+ "noxfile.py",
+ "renovate.json",
+ "scripts/decrypt-secrets.sh",
+ "scripts/fixup_binaryauthorization_v1beta1_keywords.py",
+ "scripts/readme-gen/readme_gen.py",
+ "scripts/readme-gen/templates/README.tmpl.rst",
+ "scripts/readme-gen/templates/auth.tmpl.rst",
+ "scripts/readme-gen/templates/auth_api_key.tmpl.rst",
+ "scripts/readme-gen/templates/install_deps.tmpl.rst",
+ "scripts/readme-gen/templates/install_portaudio.tmpl.rst",
+ "setup.cfg",
+ "testing/.gitignore",
+ "tests/unit/gapic/binaryauthorization_v1beta1/__init__.py",
+ "tests/unit/gapic/binaryauthorization_v1beta1/test_binauthz_management_service_v1_beta1.py"
]
}
\ No newline at end of file
diff --git a/synth.py b/synth.py
index f5c5198..6780063 100644
--- a/synth.py
+++ b/synth.py
@@ -47,7 +47,7 @@
# Add templated files
# ----------------------------------------------------------------------------
-templated_files = common.py_library(cov_level=99, microgenerator=True)
+templated_files = common.py_library(cov_level=98, microgenerator=True)
s.move(
templated_files,
excludes=[".coveragerc"], # the microgenerator has a goodcoveragerc file
diff --git a/testing/constraints-3.6.txt b/testing/constraints-3.6.txt
new file mode 100644
index 0000000..22b03cb
--- /dev/null
+++ b/testing/constraints-3.6.txt
@@ -0,0 +1,9 @@
+# This constraints file is used to check that lower bounds
+# are correct in setup.py
+# List all library dependencies and extras in this file.
+# Pin the version to the lower bound.
+
+# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev",
+# Then this file should have google-cloud-foo==1.14.0
+google-api-core==1.22.2
+proto-plus==1.4.0
diff --git a/testing/constraints-3.7.txt b/testing/constraints-3.7.txt
new file mode 100644
index 0000000..da93009
--- /dev/null
+++ b/testing/constraints-3.7.txt
@@ -0,0 +1,2 @@
+# This constraints file is left intentionally empty
+# so the latest version of dependencies is installed
\ No newline at end of file
diff --git a/testing/constraints-3.8.txt b/testing/constraints-3.8.txt
new file mode 100644
index 0000000..da93009
--- /dev/null
+++ b/testing/constraints-3.8.txt
@@ -0,0 +1,2 @@
+# This constraints file is left intentionally empty
+# so the latest version of dependencies is installed
\ No newline at end of file
diff --git a/testing/constraints-3.9.txt b/testing/constraints-3.9.txt
new file mode 100644
index 0000000..da93009
--- /dev/null
+++ b/testing/constraints-3.9.txt
@@ -0,0 +1,2 @@
+# This constraints file is left intentionally empty
+# so the latest version of dependencies is installed
\ No newline at end of file
diff --git a/tests/unit/gapic/binaryauthorization_v1beta1/__init__.py b/tests/unit/gapic/binaryauthorization_v1beta1/__init__.py
index 8b13789..42ffdf2 100644
--- a/tests/unit/gapic/binaryauthorization_v1beta1/__init__.py
+++ b/tests/unit/gapic/binaryauthorization_v1beta1/__init__.py
@@ -1 +1,16 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
diff --git a/tests/unit/gapic/binaryauthorization_v1beta1/test_binauthz_management_service_v1_beta1.py b/tests/unit/gapic/binaryauthorization_v1beta1/test_binauthz_management_service_v1_beta1.py
index 3b00805..0c18968 100644
--- a/tests/unit/gapic/binaryauthorization_v1beta1/test_binauthz_management_service_v1_beta1.py
+++ b/tests/unit/gapic/binaryauthorization_v1beta1/test_binauthz_management_service_v1_beta1.py
@@ -103,15 +103,25 @@ def test__get_default_mtls_endpoint():
)
-def test_binauthz_management_service_v1_beta1_client_from_service_account_info():
+@pytest.mark.parametrize(
+ "client_class",
+ [
+ BinauthzManagementServiceV1Beta1Client,
+ BinauthzManagementServiceV1Beta1AsyncClient,
+ ],
+)
+def test_binauthz_management_service_v1_beta1_client_from_service_account_info(
+ client_class,
+):
creds = credentials.AnonymousCredentials()
with mock.patch.object(
service_account.Credentials, "from_service_account_info"
) as factory:
factory.return_value = creds
info = {"valid": True}
- client = BinauthzManagementServiceV1Beta1Client.from_service_account_info(info)
+ client = client_class.from_service_account_info(info)
assert client.transport._credentials == creds
+ assert isinstance(client, client_class)
assert client.transport._host == "binaryauthorization.googleapis.com:443"
@@ -133,9 +143,11 @@ def test_binauthz_management_service_v1_beta1_client_from_service_account_file(
factory.return_value = creds
client = client_class.from_service_account_file("dummy/file/path.json")
assert client.transport._credentials == creds
+ assert isinstance(client, client_class)
client = client_class.from_service_account_json("dummy/file/path.json")
assert client.transport._credentials == creds
+ assert isinstance(client, client_class)
assert client.transport._host == "binaryauthorization.googleapis.com:443"
@@ -204,7 +216,7 @@ def test_binauthz_management_service_v1_beta1_client_client_options(
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -220,7 +232,7 @@ def test_binauthz_management_service_v1_beta1_client_client_options(
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -236,7 +248,7 @@ def test_binauthz_management_service_v1_beta1_client_client_options(
credentials_file=None,
host=client.DEFAULT_MTLS_ENDPOINT,
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -264,7 +276,7 @@ def test_binauthz_management_service_v1_beta1_client_client_options(
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id="octopus",
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -325,29 +337,25 @@ def test_binauthz_management_service_v1_beta1_client_mtls_env_auto(
client_cert_source=client_cert_source_callback
)
with mock.patch.object(transport_class, "__init__") as patched:
- ssl_channel_creds = mock.Mock()
- with mock.patch(
- "grpc.ssl_channel_credentials", return_value=ssl_channel_creds
- ):
- patched.return_value = None
- client = client_class(client_options=options)
+ patched.return_value = None
+ client = client_class(client_options=options)
- if use_client_cert_env == "false":
- expected_ssl_channel_creds = None
- expected_host = client.DEFAULT_ENDPOINT
- else:
- expected_ssl_channel_creds = ssl_channel_creds
- expected_host = client.DEFAULT_MTLS_ENDPOINT
+ if use_client_cert_env == "false":
+ expected_client_cert_source = None
+ expected_host = client.DEFAULT_ENDPOINT
+ else:
+ expected_client_cert_source = client_cert_source_callback
+ expected_host = client.DEFAULT_MTLS_ENDPOINT
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host=expected_host,
- scopes=None,
- ssl_channel_credentials=expected_ssl_channel_creds,
- quota_project_id=None,
- client_info=transports.base.DEFAULT_CLIENT_INFO,
- )
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=expected_host,
+ scopes=None,
+ client_cert_source_for_mtls=expected_client_cert_source,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
# Check the case ADC client cert is provided. Whether client cert is used depends on
# GOOGLE_API_USE_CLIENT_CERTIFICATE value.
@@ -356,66 +364,53 @@ def test_binauthz_management_service_v1_beta1_client_mtls_env_auto(
):
with mock.patch.object(transport_class, "__init__") as patched:
with mock.patch(
- "google.auth.transport.grpc.SslCredentials.__init__", return_value=None
+ "google.auth.transport.mtls.has_default_client_cert_source",
+ return_value=True,
):
with mock.patch(
- "google.auth.transport.grpc.SslCredentials.is_mtls",
- new_callable=mock.PropertyMock,
- ) as is_mtls_mock:
- with mock.patch(
- "google.auth.transport.grpc.SslCredentials.ssl_credentials",
- new_callable=mock.PropertyMock,
- ) as ssl_credentials_mock:
- if use_client_cert_env == "false":
- is_mtls_mock.return_value = False
- ssl_credentials_mock.return_value = None
- expected_host = client.DEFAULT_ENDPOINT
- expected_ssl_channel_creds = None
- else:
- is_mtls_mock.return_value = True
- ssl_credentials_mock.return_value = mock.Mock()
- expected_host = client.DEFAULT_MTLS_ENDPOINT
- expected_ssl_channel_creds = (
- ssl_credentials_mock.return_value
- )
-
- patched.return_value = None
- client = client_class()
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host=expected_host,
- scopes=None,
- ssl_channel_credentials=expected_ssl_channel_creds,
- quota_project_id=None,
- client_info=transports.base.DEFAULT_CLIENT_INFO,
- )
+ "google.auth.transport.mtls.default_client_cert_source",
+ return_value=client_cert_source_callback,
+ ):
+ if use_client_cert_env == "false":
+ expected_host = client.DEFAULT_ENDPOINT
+ expected_client_cert_source = None
+ else:
+ expected_host = client.DEFAULT_MTLS_ENDPOINT
+ expected_client_cert_source = client_cert_source_callback
- # Check the case client_cert_source and ADC client cert are not provided.
- with mock.patch.dict(
- os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
- ):
- with mock.patch.object(transport_class, "__init__") as patched:
- with mock.patch(
- "google.auth.transport.grpc.SslCredentials.__init__", return_value=None
- ):
- with mock.patch(
- "google.auth.transport.grpc.SslCredentials.is_mtls",
- new_callable=mock.PropertyMock,
- ) as is_mtls_mock:
- is_mtls_mock.return_value = False
patched.return_value = None
client = client_class()
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
- host=client.DEFAULT_ENDPOINT,
+ host=expected_host,
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=expected_client_cert_source,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
+ # Check the case client_cert_source and ADC client cert are not provided.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+ ):
+ with mock.patch.object(transport_class, "__init__") as patched:
+ with mock.patch(
+ "google.auth.transport.mtls.has_default_client_cert_source",
+ return_value=False,
+ ):
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
@pytest.mark.parametrize(
"client_class,transport_class,transport_name",
@@ -445,7 +440,7 @@ def test_binauthz_management_service_v1_beta1_client_client_options_scopes(
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=["1", "2"],
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -479,7 +474,7 @@ def test_binauthz_management_service_v1_beta1_client_client_options_credentials_
credentials_file="credentials.json",
host=client.DEFAULT_ENDPOINT,
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -498,7 +493,7 @@ def test_binauthz_management_service_v1_beta1_client_client_options_from_dict():
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -548,6 +543,22 @@ def test_get_policy_from_dict():
test_get_policy(request_type=dict)
+def test_get_policy_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = BinauthzManagementServiceV1Beta1Client(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_policy), "__call__") as call:
+ client.get_policy()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == service.GetPolicyRequest()
+
+
@pytest.mark.asyncio
async def test_get_policy_async(
transport: str = "grpc_asyncio", request_type=service.GetPolicyRequest
@@ -769,6 +780,22 @@ def test_update_policy_from_dict():
test_update_policy(request_type=dict)
+def test_update_policy_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = BinauthzManagementServiceV1Beta1Client(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.update_policy), "__call__") as call:
+ client.update_policy()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == service.UpdatePolicyRequest()
+
+
@pytest.mark.asyncio
async def test_update_policy_async(
transport: str = "grpc_asyncio", request_type=service.UpdatePolicyRequest
@@ -989,6 +1016,22 @@ def test_create_attestor_from_dict():
test_create_attestor(request_type=dict)
+def test_create_attestor_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = BinauthzManagementServiceV1Beta1Client(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.create_attestor), "__call__") as call:
+ client.create_attestor()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == service.CreateAttestorRequest()
+
+
@pytest.mark.asyncio
async def test_create_attestor_async(
transport: str = "grpc_asyncio", request_type=service.CreateAttestorRequest
@@ -1218,6 +1261,22 @@ def test_get_attestor_from_dict():
test_get_attestor(request_type=dict)
+def test_get_attestor_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = BinauthzManagementServiceV1Beta1Client(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_attestor), "__call__") as call:
+ client.get_attestor()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == service.GetAttestorRequest()
+
+
@pytest.mark.asyncio
async def test_get_attestor_async(
transport: str = "grpc_asyncio", request_type=service.GetAttestorRequest
@@ -1427,6 +1486,22 @@ def test_update_attestor_from_dict():
test_update_attestor(request_type=dict)
+def test_update_attestor_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = BinauthzManagementServiceV1Beta1Client(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.update_attestor), "__call__") as call:
+ client.update_attestor()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == service.UpdateAttestorRequest()
+
+
@pytest.mark.asyncio
async def test_update_attestor_async(
transport: str = "grpc_asyncio", request_type=service.UpdateAttestorRequest
@@ -1638,6 +1713,22 @@ def test_list_attestors_from_dict():
test_list_attestors(request_type=dict)
+def test_list_attestors_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = BinauthzManagementServiceV1Beta1Client(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_attestors), "__call__") as call:
+ client.list_attestors()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == service.ListAttestorsRequest()
+
+
@pytest.mark.asyncio
async def test_list_attestors_async(
transport: str = "grpc_asyncio", request_type=service.ListAttestorsRequest
@@ -1984,6 +2075,22 @@ def test_delete_attestor_from_dict():
test_delete_attestor(request_type=dict)
+def test_delete_attestor_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = BinauthzManagementServiceV1Beta1Client(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.delete_attestor), "__call__") as call:
+ client.delete_attestor()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == service.DeleteAttestorRequest()
+
+
@pytest.mark.asyncio
async def test_delete_attestor_async(
transport: str = "grpc_asyncio", request_type=service.DeleteAttestorRequest
@@ -2313,6 +2420,53 @@ def test_binauthz_management_service_v1_beta1_transport_auth_adc():
)
+@pytest.mark.parametrize(
+ "transport_class",
+ [
+ transports.BinauthzManagementServiceV1Beta1GrpcTransport,
+ transports.BinauthzManagementServiceV1Beta1GrpcAsyncIOTransport,
+ ],
+)
+def test_binauthz_management_service_v1_beta1_grpc_transport_client_cert_source_for_mtls(
+ transport_class,
+):
+ cred = credentials.AnonymousCredentials()
+
+ # Check ssl_channel_credentials is used if provided.
+ with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
+ mock_ssl_channel_creds = mock.Mock()
+ transport_class(
+ host="squid.clam.whelk",
+ credentials=cred,
+ ssl_channel_credentials=mock_ssl_channel_creds,
+ )
+ mock_create_channel.assert_called_once_with(
+ "squid.clam.whelk:443",
+ credentials=cred,
+ credentials_file=None,
+ scopes=("https://www.googleapis.com/auth/cloud-platform",),
+ ssl_credentials=mock_ssl_channel_creds,
+ quota_project_id=None,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+
+ # Check that client_cert_source_for_mtls is used when ssl_channel_credentials
+ # is not provided.
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+ with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+ transport_class(
+ credentials=cred,
+ client_cert_source_for_mtls=client_cert_source_callback,
+ )
+ expected_cert, expected_key = client_cert_source_callback()
+ mock_ssl_cred.assert_called_once_with(
+ certificate_chain=expected_cert, private_key=expected_key
+ )
+
+
def test_binauthz_management_service_v1_beta1_host_no_port():
client = BinauthzManagementServiceV1Beta1Client(
credentials=credentials.AnonymousCredentials(),
@@ -2357,6 +2511,8 @@ def test_binauthz_management_service_v1_beta1_grpc_asyncio_transport_channel():
assert transport._ssl_channel_credentials == None
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize(
"transport_class",
[
@@ -2409,6 +2565,8 @@ def test_binauthz_management_service_v1_beta1_transport_channel_mtls_with_client
assert transport._ssl_channel_credentials == mock_ssl_cred
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize(
"transport_class",
[