diff --git a/.flake8 b/.flake8
index 29227d4cf..1a7e4989e 100644
--- a/.flake8
+++ b/.flake8
@@ -18,6 +18,9 @@
[flake8]
ignore = E203, E266, E501, W503
exclude =
+ # Exclude environment test code.
+ tests/environment/**
+
# Exclude generated code.
**/proto/**
**/gapic/**
diff --git a/.github/header-checker-lint.yml b/.github/header-checker-lint.yml
new file mode 100644
index 000000000..fc281c05b
--- /dev/null
+++ b/.github/header-checker-lint.yml
@@ -0,0 +1,15 @@
+{"allowedCopyrightHolders": ["Google LLC"],
+ "allowedLicenses": ["Apache-2.0", "MIT", "BSD-3"],
+ "ignoreFiles": ["**/requirements.txt", "**/requirements-test.txt"],
+ "sourceFileExtensions": [
+ "ts",
+ "js",
+ "java",
+ "sh",
+ "Dockerfile",
+ "yaml",
+ "py",
+ "html",
+ "txt"
+ ]
+}
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
index b9daa52f1..b4243ced7 100644
--- a/.gitignore
+++ b/.gitignore
@@ -50,8 +50,10 @@ docs.metadata
# Virtual environment
env/
+
+# Test logs
coverage.xml
-sponge_log.xml
+*sponge_log.xml
# System test environment variables.
system_tests/local_test_setup
diff --git a/.gitmodules b/.gitmodules
new file mode 100644
index 000000000..19484c9e8
--- /dev/null
+++ b/.gitmodules
@@ -0,0 +1,3 @@
+[submodule "tests/environment"]
+ path = tests/environment
+ url = https://github.com/googleapis/env-tests-logging.git
diff --git a/.kokoro/build.sh b/.kokoro/build.sh
index 7145c57b0..cb17176f0 100755
--- a/.kokoro/build.sh
+++ b/.kokoro/build.sh
@@ -40,6 +40,16 @@ python3 -m pip uninstall --yes --quiet nox-automation
python3 -m pip install --upgrade --quiet nox
python3 -m nox --version
+# If this is a continuous build, send the test log to the FlakyBot.
+# See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot.
+if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]]; then
+ cleanup() {
+ chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot
+ $KOKORO_GFILE_DIR/linux_amd64/flakybot
+ }
+ trap cleanup EXIT HUP
+fi
+
# If NOX_SESSION is set, it only runs the specified session,
# otherwise run all the sessions.
if [[ -n "${NOX_SESSION:-}" ]]; then
diff --git a/.kokoro/environment/appengine_flex_container/common.cfg b/.kokoro/environment/appengine_flex_container/common.cfg
new file mode 100644
index 000000000..1555bf28f
--- /dev/null
+++ b/.kokoro/environment/appengine_flex_container/common.cfg
@@ -0,0 +1,34 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+
+# Specify which tests to run
+env_vars: {
+ key: "ENVIRONMENT"
+ value: "appengine_flex_container"
+}
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Download resources for system tests (service account key, etc.)
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python"
+
+# Use the trampoline script to run in docker.
+build_file: "python-logging/.kokoro/trampoline_v2.sh"
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-multi"
+}
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-logging/.kokoro/environment_tests.sh"
+}
diff --git a/.kokoro/environment/appengine_flex_container/continuous.cfg b/.kokoro/environment/appengine_flex_container/continuous.cfg
new file mode 100644
index 000000000..8f43917d9
--- /dev/null
+++ b/.kokoro/environment/appengine_flex_container/continuous.cfg
@@ -0,0 +1 @@
+# Format: //devtools/kokoro/config/proto/build.proto
\ No newline at end of file
diff --git a/.kokoro/environment/appengine_flex_container/presubmit.cfg b/.kokoro/environment/appengine_flex_container/presubmit.cfg
new file mode 100644
index 000000000..18a4c3532
--- /dev/null
+++ b/.kokoro/environment/appengine_flex_container/presubmit.cfg
@@ -0,0 +1 @@
+# Format: //devtools/kokoro/config/proto/build.proto
diff --git a/.kokoro/environment/appengine_flex_python/common.cfg b/.kokoro/environment/appengine_flex_python/common.cfg
new file mode 100644
index 000000000..9d3506cb9
--- /dev/null
+++ b/.kokoro/environment/appengine_flex_python/common.cfg
@@ -0,0 +1,34 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+
+# Specify which tests to run
+env_vars: {
+ key: "ENVIRONMENT"
+ value: "appengine_flex_python"
+}
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Download resources for system tests (service account key, etc.)
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python"
+
+# Use the trampoline script to run in docker.
+build_file: "python-logging/.kokoro/trampoline_v2.sh"
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-multi"
+}
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-logging/.kokoro/environment_tests.sh"
+}
diff --git a/.kokoro/environment/appengine_flex_python/continuous.cfg b/.kokoro/environment/appengine_flex_python/continuous.cfg
new file mode 100644
index 000000000..8f43917d9
--- /dev/null
+++ b/.kokoro/environment/appengine_flex_python/continuous.cfg
@@ -0,0 +1 @@
+# Format: //devtools/kokoro/config/proto/build.proto
\ No newline at end of file
diff --git a/.kokoro/environment/appengine_flex_python/presubmit.cfg b/.kokoro/environment/appengine_flex_python/presubmit.cfg
new file mode 100644
index 000000000..18a4c3532
--- /dev/null
+++ b/.kokoro/environment/appengine_flex_python/presubmit.cfg
@@ -0,0 +1 @@
+# Format: //devtools/kokoro/config/proto/build.proto
diff --git a/.kokoro/environment/appengine_standard/common.cfg b/.kokoro/environment/appengine_standard/common.cfg
new file mode 100644
index 000000000..07242418a
--- /dev/null
+++ b/.kokoro/environment/appengine_standard/common.cfg
@@ -0,0 +1,34 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+
+# Specify which tests to run
+env_vars: {
+ key: "ENVIRONMENT"
+ value: "appengine_standard"
+}
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Download resources for system tests (service account key, etc.)
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python"
+
+# Use the trampoline script to run in docker.
+build_file: "python-logging/.kokoro/trampoline_v2.sh"
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-multi"
+}
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-logging/.kokoro/environment_tests.sh"
+}
diff --git a/.kokoro/environment/appengine_standard/continuous.cfg b/.kokoro/environment/appengine_standard/continuous.cfg
new file mode 100644
index 000000000..8f43917d9
--- /dev/null
+++ b/.kokoro/environment/appengine_standard/continuous.cfg
@@ -0,0 +1 @@
+# Format: //devtools/kokoro/config/proto/build.proto
\ No newline at end of file
diff --git a/.kokoro/environment/appengine_standard/presubmit.cfg b/.kokoro/environment/appengine_standard/presubmit.cfg
new file mode 100644
index 000000000..18a4c3532
--- /dev/null
+++ b/.kokoro/environment/appengine_standard/presubmit.cfg
@@ -0,0 +1 @@
+# Format: //devtools/kokoro/config/proto/build.proto
diff --git a/.kokoro/environment/cloudrun/common.cfg b/.kokoro/environment/cloudrun/common.cfg
new file mode 100644
index 000000000..a9a26f468
--- /dev/null
+++ b/.kokoro/environment/cloudrun/common.cfg
@@ -0,0 +1,34 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+
+# Specify which tests to run
+env_vars: {
+ key: "ENVIRONMENT"
+ value: "cloudrun"
+}
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Download resources for system tests (service account key, etc.)
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python"
+
+# Use the trampoline script to run in docker.
+build_file: "python-logging/.kokoro/trampoline_v2.sh"
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-multi"
+}
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-logging/.kokoro/environment_tests.sh"
+}
diff --git a/.kokoro/environment/cloudrun/continuous.cfg b/.kokoro/environment/cloudrun/continuous.cfg
new file mode 100644
index 000000000..8f43917d9
--- /dev/null
+++ b/.kokoro/environment/cloudrun/continuous.cfg
@@ -0,0 +1 @@
+# Format: //devtools/kokoro/config/proto/build.proto
\ No newline at end of file
diff --git a/.kokoro/environment/cloudrun/presubmit.cfg b/.kokoro/environment/cloudrun/presubmit.cfg
new file mode 100644
index 000000000..18a4c3532
--- /dev/null
+++ b/.kokoro/environment/cloudrun/presubmit.cfg
@@ -0,0 +1 @@
+# Format: //devtools/kokoro/config/proto/build.proto
diff --git a/.kokoro/environment/compute/common.cfg b/.kokoro/environment/compute/common.cfg
new file mode 100644
index 000000000..ac601ef09
--- /dev/null
+++ b/.kokoro/environment/compute/common.cfg
@@ -0,0 +1,34 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+
+# Specify which tests to run
+env_vars: {
+ key: "ENVIRONMENT"
+ value: "compute"
+}
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Download resources for system tests (service account key, etc.)
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python"
+
+# Use the trampoline script to run in docker.
+build_file: "python-logging/.kokoro/trampoline_v2.sh"
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-multi"
+}
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-logging/.kokoro/environment_tests.sh"
+}
diff --git a/.kokoro/environment/compute/continuous.cfg b/.kokoro/environment/compute/continuous.cfg
new file mode 100644
index 000000000..8f43917d9
--- /dev/null
+++ b/.kokoro/environment/compute/continuous.cfg
@@ -0,0 +1 @@
+# Format: //devtools/kokoro/config/proto/build.proto
\ No newline at end of file
diff --git a/.kokoro/environment/compute/presubmit.cfg b/.kokoro/environment/compute/presubmit.cfg
new file mode 100644
index 000000000..18a4c3532
--- /dev/null
+++ b/.kokoro/environment/compute/presubmit.cfg
@@ -0,0 +1 @@
+# Format: //devtools/kokoro/config/proto/build.proto
diff --git a/.kokoro/environment/functions/common.cfg b/.kokoro/environment/functions/common.cfg
new file mode 100644
index 000000000..96b0940fe
--- /dev/null
+++ b/.kokoro/environment/functions/common.cfg
@@ -0,0 +1,34 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+
+# Specify which tests to run
+env_vars: {
+ key: "ENVIRONMENT"
+ value: "functions"
+}
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Download resources for system tests (service account key, etc.)
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python"
+
+# Use the trampoline script to run in docker.
+build_file: "python-logging/.kokoro/trampoline_v2.sh"
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-multi"
+}
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-logging/.kokoro/environment_tests.sh"
+}
diff --git a/.kokoro/environment/functions/continuous.cfg b/.kokoro/environment/functions/continuous.cfg
new file mode 100644
index 000000000..8f43917d9
--- /dev/null
+++ b/.kokoro/environment/functions/continuous.cfg
@@ -0,0 +1 @@
+# Format: //devtools/kokoro/config/proto/build.proto
\ No newline at end of file
diff --git a/.kokoro/environment/functions/presubmit.cfg b/.kokoro/environment/functions/presubmit.cfg
new file mode 100644
index 000000000..18a4c3532
--- /dev/null
+++ b/.kokoro/environment/functions/presubmit.cfg
@@ -0,0 +1 @@
+# Format: //devtools/kokoro/config/proto/build.proto
diff --git a/.kokoro/environment/functions_37/common.cfg b/.kokoro/environment/functions_37/common.cfg
new file mode 100644
index 000000000..4daa52db7
--- /dev/null
+++ b/.kokoro/environment/functions_37/common.cfg
@@ -0,0 +1,39 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+
+# Specify which tests to run
+env_vars: {
+ key: "ENVIRONMENT"
+ value: "functions"
+}
+
+env_vars: {
+ key: "RUNTIME"
+ value: "python37"
+}
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Download resources for system tests (service account key, etc.)
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python"
+
+# Use the trampoline script to run in docker.
+build_file: "python-logging/.kokoro/trampoline_v2.sh"
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-multi"
+}
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-logging/.kokoro/environment_tests.sh"
+}
diff --git a/.kokoro/environment/functions_37/continuous.cfg b/.kokoro/environment/functions_37/continuous.cfg
new file mode 100644
index 000000000..8f43917d9
--- /dev/null
+++ b/.kokoro/environment/functions_37/continuous.cfg
@@ -0,0 +1 @@
+# Format: //devtools/kokoro/config/proto/build.proto
\ No newline at end of file
diff --git a/.kokoro/environment/functions_37/presubmit.cfg b/.kokoro/environment/functions_37/presubmit.cfg
new file mode 100644
index 000000000..18a4c3532
--- /dev/null
+++ b/.kokoro/environment/functions_37/presubmit.cfg
@@ -0,0 +1 @@
+# Format: //devtools/kokoro/config/proto/build.proto
diff --git a/.kokoro/environment/kubernetes/common.cfg b/.kokoro/environment/kubernetes/common.cfg
new file mode 100644
index 000000000..a9fcc33e3
--- /dev/null
+++ b/.kokoro/environment/kubernetes/common.cfg
@@ -0,0 +1,34 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+
+# Specify which tests to run
+env_vars: {
+ key: "ENVIRONMENT"
+ value: "kubernetes"
+}
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Download resources for system tests (service account key, etc.)
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python"
+
+# Use the trampoline script to run in docker.
+build_file: "python-logging/.kokoro/trampoline_v2.sh"
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-multi"
+}
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-logging/.kokoro/environment_tests.sh"
+}
diff --git a/.kokoro/environment/kubernetes/continuous.cfg b/.kokoro/environment/kubernetes/continuous.cfg
new file mode 100644
index 000000000..8f43917d9
--- /dev/null
+++ b/.kokoro/environment/kubernetes/continuous.cfg
@@ -0,0 +1 @@
+# Format: //devtools/kokoro/config/proto/build.proto
\ No newline at end of file
diff --git a/.kokoro/environment/kubernetes/presubmit.cfg b/.kokoro/environment/kubernetes/presubmit.cfg
new file mode 100644
index 000000000..18a4c3532
--- /dev/null
+++ b/.kokoro/environment/kubernetes/presubmit.cfg
@@ -0,0 +1 @@
+# Format: //devtools/kokoro/config/proto/build.proto
diff --git a/.kokoro/environment_tests.sh b/.kokoro/environment_tests.sh
new file mode 100755
index 000000000..f8f138ea1
--- /dev/null
+++ b/.kokoro/environment_tests.sh
@@ -0,0 +1,83 @@
+#!/bin/bash
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -eox pipefail
+
+if [[ -z "${ENVIRONMENT:-}" ]]; then
+ echo "ENVIRONMENT not set. Exiting"
+ exit 1
+fi
+
+if [[ -z "${PROJECT_ROOT:-}" ]]; then
+ PROJECT_ROOT="github/python-logging"
+fi
+
+# make sure submodule is up to date
+git submodule update --init --recursive
+
+cd "${PROJECT_ROOT}/tests/environment"
+
+# Disable buffering, so that the logs stream through.
+export PYTHONUNBUFFERED=1
+
+# Debug: show build environment
+env | grep KOKORO
+
+# Setup service account credentials.
+export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json
+gcloud auth activate-service-account --key-file=$GOOGLE_APPLICATION_CREDENTIALS
+
+# Setup project id.
+export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json")
+gcloud config set project $PROJECT_ID
+
+# set a default zone.
+gcloud config set compute/zone us-central1-b
+
+# authenticate docker
+gcloud auth configure-docker -q
+
+# Remove old nox
+python3.6 -m pip uninstall --yes --quiet nox-automation
+
+# Install nox
+python3.6 -m pip install --upgrade --quiet nox
+python3.6 -m nox --version
+
+# Install kubectl
+if [[ "${ENVIRONMENT}" == "kubernetes" ]]; then
+ curl -LO https://dl.k8s.io/release/v1.20.0/bin/linux/amd64/kubectl
+ chmod +x kubectl
+ mkdir -p ~/.local/bin
+ mv ./kubectl ~/.local/bin
+ export PATH=$PATH:~/.local/bin
+fi
+
+# create a unique id for this run
+UUID=$(python -c 'import uuid; print(uuid.uuid1())' | head -c 7)
+export ENVCTL_ID=ci-$UUID
+echo $ENVCTL_ID
+
+# Run the specified environment test
+set +e
+python3.6 -m nox --session "tests(language='python', platform='$ENVIRONMENT')"
+TEST_STATUS_CODE=$?
+
+# destroy resources
+echo "cleaning up..."
+${PROJECT_ROOT}/tests/environment/envctl/envctl python $ENVIRONMENT destroy
+
+# exit with proper status code
+exit $TEST_STATUS_CODE
diff --git a/.kokoro/test-samples.sh b/.kokoro/test-samples.sh
index ba97b53d5..e75891832 100755
--- a/.kokoro/test-samples.sh
+++ b/.kokoro/test-samples.sh
@@ -87,11 +87,11 @@ for file in samples/**/requirements.txt; do
python3.6 -m nox -s "$RUN_TESTS_SESSION"
EXIT=$?
- # If this is a periodic build, send the test log to the Build Cop Bot.
- # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/buildcop.
+ # If this is a periodic build, send the test log to the FlakyBot.
+ # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot.
if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
- chmod +x $KOKORO_GFILE_DIR/linux_amd64/buildcop
- $KOKORO_GFILE_DIR/linux_amd64/buildcop
+ chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot
+ $KOKORO_GFILE_DIR/linux_amd64/flakybot
fi
if [[ $EXIT -ne 0 ]]; then
diff --git a/.kokoro/trampoline_v2.sh b/.kokoro/trampoline_v2.sh
index 719bcd5ba..4af6cdc26 100755
--- a/.kokoro/trampoline_v2.sh
+++ b/.kokoro/trampoline_v2.sh
@@ -159,7 +159,7 @@ if [[ -n "${KOKORO_BUILD_ID:-}" ]]; then
"KOKORO_GITHUB_COMMIT"
"KOKORO_GITHUB_PULL_REQUEST_NUMBER"
"KOKORO_GITHUB_PULL_REQUEST_COMMIT"
- # For Build Cop Bot
+ # For FlakyBot
"KOKORO_GITHUB_COMMIT_URL"
"KOKORO_GITHUB_PULL_REQUEST_URL"
)
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index a9024b15d..32302e488 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -12,6 +12,6 @@ repos:
hooks:
- id: black
- repo: https://gitlab.com/pycqa/flake8
- rev: 3.8.4
+ rev: 3.9.0
hooks:
- id: flake8
diff --git a/.trampolinerc b/.trampolinerc
index c7d663ae9..6f984309b 100644
--- a/.trampolinerc
+++ b/.trampolinerc
@@ -15,14 +15,12 @@
# Template for .trampolinerc
# Add required env vars here.
-required_envvars+=(
- "STAGING_BUCKET"
- "V2_STAGING_BUCKET"
- "NOX_SESSION"
-)
+required_envvars+=()
# Add env vars which are passed down into the container here.
pass_down_envvars+=(
+ "ENVIRONMENT"
+ "RUNTIME"
"STAGING_BUCKET"
"V2_STAGING_BUCKET"
"NOX_SESSION"
diff --git a/CHANGELOG.md b/CHANGELOG.md
index b89976db1..02a416d51 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,25 @@
[1]: https://pypi.org/project/google-cloud-logging/#history
+## [2.3.0](https://www.github.com/googleapis/python-logging/compare/v2.2.0...v2.3.0) (2021-03-15)
+
+
+### Features
+
+* Add json setting to allow unicodes to show in log instead of ascii characters ([#193](https://www.github.com/googleapis/python-logging/issues/193)) ([e8c8e30](https://www.github.com/googleapis/python-logging/commit/e8c8e30fc4f618273dec1415c752eed203c75b67))
+* detect monitored resources on all GCP environments ([#200](https://www.github.com/googleapis/python-logging/issues/200)) ([4eda681](https://www.github.com/googleapis/python-logging/commit/4eda6813d19df8a119f1dcd47ff79389310d4a6f))
+
+
+### Bug Fixes
+
+* logger uses default resource ([#207](https://www.github.com/googleapis/python-logging/issues/207)) ([0f90a79](https://www.github.com/googleapis/python-logging/commit/0f90a79d165314d261413cc369408e15f711129f))
+* no duplicate logs on GCF or GAE ([#209](https://www.github.com/googleapis/python-logging/issues/209)) ([37e6c8e](https://www.github.com/googleapis/python-logging/commit/37e6c8e90775ddc2fc454f5cb13cab04231c2222))
+
+
+### Documentation
+
+* add python std_logging to sample browser ([#173](https://www.github.com/googleapis/python-logging/issues/173)) ([7cc7275](https://www.github.com/googleapis/python-logging/commit/7cc727598c33e7e264ddbeef0a2604a3c215b260))
+
## [2.2.0](https://www.github.com/googleapis/python-logging/compare/v2.1.1...v2.2.0) (2021-01-27)
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index cd48664d8..f6ddd7268 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -70,9 +70,14 @@ We use `nox `__ to instrument our tests.
- To test your changes, run unit tests with ``nox``::
$ nox -s unit-2.7
- $ nox -s unit-3.7
+ $ nox -s unit-3.8
$ ...
+- Args to pytest can be passed through the nox command separated by a ``--``. For
+ example, to run a single test::
+
+ $ nox -s unit-3.8 -- -k <name of test>
+
.. note::
The unit tests and system tests are described in the
@@ -93,8 +98,12 @@ On Debian/Ubuntu::
************
Coding Style
************
+- We use the automatic code formatter ``black``. You can run it using
+ the nox session ``blacken``. This will eliminate many lint errors. Run via::
+
+ $ nox -s blacken
-- PEP8 compliance, with exceptions defined in the linter configuration.
+- PEP8 compliance is required, with exceptions defined in the linter configuration.
If you have ``nox`` installed, you can test that you have not introduced
any non-compliant code via::
@@ -133,13 +142,18 @@ Running System Tests
- To run system tests, you can execute::
- $ nox -s system-3.7
+ # Run all system tests
+ $ nox -s system-3.8
$ nox -s system-2.7
+ # Run a single system test
+ $ nox -s system-3.8 -- -k <name of test>
+
+
.. note::
System tests are only configured to run under Python 2.7 and
- Python 3.7. For expediency, we do not run them in older versions
+ Python 3.8. For expediency, we do not run them in older versions
of Python 3.
This alone will not run the tests. You'll need to change some local
diff --git a/MANIFEST.in b/MANIFEST.in
index e9e29d120..e783f4c62 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -16,10 +16,10 @@
# Generated by synthtool. DO NOT EDIT!
include README.rst LICENSE
-recursive-include google *.json *.proto
+recursive-include google *.json *.proto py.typed
recursive-include tests *
global-exclude *.py[co]
global-exclude __pycache__
# Exclude scripts for samples readmegen
-prune scripts/readme-gen
\ No newline at end of file
+prune scripts/readme-gen
diff --git a/google/cloud/logging_v2/client.py b/google/cloud/logging_v2/client.py
index ee65d288a..17d853401 100644
--- a/google/cloud/logging_v2/client.py
+++ b/google/cloud/logging_v2/client.py
@@ -29,7 +29,6 @@
from google.cloud.client import ClientWithProject
from google.cloud.environment_vars import DISABLE_GRPC
from google.cloud.logging_v2._helpers import _add_defaults_to_filter
-from google.cloud.logging_v2._helpers import retrieve_metadata_server
from google.cloud.logging_v2._http import Connection
from google.cloud.logging_v2._http import _LoggingAPI as JSONLoggingAPI
from google.cloud.logging_v2._http import _MetricsAPI as JSONMetricsAPI
@@ -39,6 +38,9 @@
from google.cloud.logging_v2.handlers import ContainerEngineHandler
from google.cloud.logging_v2.handlers import setup_logging
from google.cloud.logging_v2.handlers.handlers import EXCLUDED_LOGGER_DEFAULTS
+from google.cloud.logging_v2.resource import Resource
+from google.cloud.logging_v2.handlers._monitored_resources import detect_resource
+
from google.cloud.logging_v2.logger import Logger
from google.cloud.logging_v2.metric import Metric
@@ -48,14 +50,9 @@
_DISABLE_GRPC = os.getenv(DISABLE_GRPC, False)
_USE_GRPC = _HAVE_GRPC and not _DISABLE_GRPC
-_APPENGINE_FLEXIBLE_ENV_VM = "GAE_APPENGINE_HOSTNAME"
-"""Environment variable set in App Engine when vm:true is set."""
-
-_APPENGINE_INSTANCE_ID = "GAE_INSTANCE"
-"""Environment variable set in App Engine standard and flexible environment."""
-
-_GKE_CLUSTER_NAME = "instance/attributes/cluster-name"
-"""Attribute in metadata server when in GKE environment."""
+_GAE_RESOURCE_TYPE = "gae_app"
+_GKE_RESOURCE_TYPE = "k8s_container"
+_GCF_RESOURCE_TYPE = "cloud_function"
class Client(ClientWithProject):
@@ -348,17 +345,20 @@ def get_default_handler(self, **kw):
Returns:
logging.Handler: The default log handler based on the environment
"""
- gke_cluster_name = retrieve_metadata_server(_GKE_CLUSTER_NAME)
+ monitored_resource = kw.pop("resource", detect_resource(self.project))
if (
- _APPENGINE_FLEXIBLE_ENV_VM in os.environ
- or _APPENGINE_INSTANCE_ID in os.environ
+ isinstance(monitored_resource, Resource)
+ and monitored_resource.type == _GAE_RESOURCE_TYPE
):
return AppEngineHandler(self, **kw)
- elif gke_cluster_name is not None:
+ elif (
+ isinstance(monitored_resource, Resource)
+ and monitored_resource.type == _GKE_RESOURCE_TYPE
+ ):
return ContainerEngineHandler(**kw)
else:
- return CloudLoggingHandler(self, **kw)
+ return CloudLoggingHandler(self, resource=monitored_resource, **kw)
def setup_logging(
self, *, log_level=logging.INFO, excluded_loggers=EXCLUDED_LOGGER_DEFAULTS, **kw
diff --git a/google/cloud/logging_v2/handlers/_helpers.py b/google/cloud/logging_v2/handlers/_helpers.py
index fff1e9a89..88eba07a6 100644
--- a/google/cloud/logging_v2/handlers/_helpers.py
+++ b/google/cloud/logging_v2/handlers/_helpers.py
@@ -48,7 +48,7 @@ def format_stackdriver_json(record, message):
"severity": record.levelname,
}
- return json.dumps(payload)
+ return json.dumps(payload, ensure_ascii=False)
def get_request_data_from_flask():
diff --git a/google/cloud/logging_v2/handlers/_monitored_resources.py b/google/cloud/logging_v2/handlers/_monitored_resources.py
new file mode 100644
index 000000000..bd05c2522
--- /dev/null
+++ b/google/cloud/logging_v2/handlers/_monitored_resources.py
@@ -0,0 +1,195 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+
+from google.cloud.logging_v2.resource import Resource
+from google.cloud.logging_v2._helpers import retrieve_metadata_server
+
+_GAE_SERVICE_ENV = "GAE_SERVICE"
+_GAE_VERSION_ENV = "GAE_VERSION"
+_GAE_INSTANCE_ENV = "GAE_INSTANCE"
+_GAE_ENV_VARS = [_GAE_SERVICE_ENV, _GAE_VERSION_ENV, _GAE_INSTANCE_ENV]
+"""Environment variables set in App Engine environment."""
+
+_CLOUD_RUN_SERVICE_ID = "K_SERVICE"
+_CLOUD_RUN_REVISION_ID = "K_REVISION"
+_CLOUD_RUN_CONFIGURATION_ID = "K_CONFIGURATION"
+_CLOUD_RUN_ENV_VARS = [
+ _CLOUD_RUN_SERVICE_ID,
+ _CLOUD_RUN_REVISION_ID,
+ _CLOUD_RUN_CONFIGURATION_ID,
+]
+"""Environment variables set in Cloud Run environment."""
+
+_FUNCTION_TARGET = "FUNCTION_TARGET"
+_FUNCTION_SIGNATURE = "FUNCTION_SIGNATURE_TYPE"
+_FUNCTION_NAME = "FUNCTION_NAME"
+_FUNCTION_REGION = "FUNCTION_REGION"
+_FUNCTION_ENTRY = "ENTRY_POINT"
+_FUNCTION_ENV_VARS = [_FUNCTION_TARGET, _FUNCTION_SIGNATURE, _CLOUD_RUN_SERVICE_ID]
+_LEGACY_FUNCTION_ENV_VARS = [_FUNCTION_NAME, _FUNCTION_REGION, _FUNCTION_ENTRY]
+"""Environment variables set in Cloud Functions environments."""
+
+
+_REGION_ID = "instance/region"
+_ZONE_ID = "instance/zone"
+_GCE_INSTANCE_ID = "instance/id"
+"""Attribute in metadata server for compute region and instance."""
+
+_GKE_CLUSTER_NAME = "instance/attributes/cluster-name"
+"""Attribute in metadata server when in GKE environment."""
+
+
+def _create_functions_resource(project):
+ """Create a standardized Cloud Functions resource.
+ Args:
+ project (str): The project ID to pass on to the resource
+ Returns:
+ google.cloud.logging.Resource
+ """
+ region = retrieve_metadata_server(_REGION_ID)
+ if _FUNCTION_NAME in os.environ:
+ function_name = os.environ.get(_FUNCTION_NAME)
+ elif _CLOUD_RUN_SERVICE_ID in os.environ:
+ function_name = os.environ.get(_CLOUD_RUN_SERVICE_ID)
+ else:
+ function_name = ""
+ resource = Resource(
+ type="cloud_function",
+ labels={
+ "project_id": project,
+ "function_name": function_name,
+ "region": region if region else "",
+ },
+ )
+ return resource
+
+
+def _create_kubernetes_resource(project):
+ """Create a standardized Kubernetes resource.
+ Args:
+ project (str): The project ID to pass on to the resource
+ Returns:
+ google.cloud.logging.Resource
+ """
+ zone = retrieve_metadata_server(_ZONE_ID)
+ cluster_name = retrieve_metadata_server(_GKE_CLUSTER_NAME)
+
+ resource = Resource(
+ type="k8s_container",
+ labels={
+ "project_id": project,
+ "location": zone if zone else "",
+ "cluster_name": cluster_name if cluster_name else "",
+ },
+ )
+ return resource
+
+
+def _create_compute_resource(project):
+ """Create a standardized Compute Engine resource.
+ Args:
+ project (str): The project ID to pass on to the resource
+ Returns:
+ google.cloud.logging.Resource
+ """
+ instance = retrieve_metadata_server(_GCE_INSTANCE_ID)
+ zone = retrieve_metadata_server(_ZONE_ID)
+ resource = Resource(
+ type="gce_instance",
+ labels={
+ "project_id": project,
+ "instance_id": instance if instance else "",
+ "zone": zone if zone else "",
+ },
+ )
+ return resource
+
+
+def _create_cloud_run_resource(project):
+ """Create a standardized Cloud Run resource.
+ Args:
+ project (str): The project ID to pass on to the resource
+ Returns:
+ google.cloud.logging.Resource
+ """
+ region = retrieve_metadata_server(_REGION_ID)
+ resource = Resource(
+ type="cloud_run_revision",
+ labels={
+ "project_id": project,
+ "service_name": os.environ.get(_CLOUD_RUN_SERVICE_ID, ""),
+ "revision_name": os.environ.get(_CLOUD_RUN_REVISION_ID, ""),
+ "location": region if region else "",
+ "configuration_name": os.environ.get(_CLOUD_RUN_CONFIGURATION_ID, ""),
+ },
+ )
+ return resource
+
+
+def _create_app_engine_resource(project):
+ """Create a standardized App Engine resource.
+ Args:
+ project (str): The project ID to pass on to the resource
+ Returns:
+ google.cloud.logging.Resource
+ """
+ zone = retrieve_metadata_server(_ZONE_ID)
+ resource = Resource(
+ type="gae_app",
+ labels={
+ "project_id": project,
+ "module_id": os.environ.get(_GAE_SERVICE_ENV, ""),
+ "version_id": os.environ.get(_GAE_VERSION_ENV, ""),
+ "zone": zone if zone else "",
+ },
+ )
+ return resource
+
+
+def _create_global_resource(project):
+ return Resource(type="global", labels={"project_id": project})
+
+
+def detect_resource(project=""):
+ """Return the default monitored resource based on the local environment.
+ Args:
+ project (str): The project ID to pass on to the resource
+ Returns:
+ google.cloud.logging.Resource: The default resource based on the environment
+ """
+ gke_cluster_name = retrieve_metadata_server(_GKE_CLUSTER_NAME)
+ gce_instance_name = retrieve_metadata_server(_GCE_INSTANCE_ID)
+
+ if all([env in os.environ for env in _GAE_ENV_VARS]):
+ # App Engine Flex or Standard
+ return _create_app_engine_resource(project)
+ elif gke_cluster_name is not None:
+ # Kubernetes Engine
+ return _create_kubernetes_resource(project)
+ elif all([env in os.environ for env in _LEGACY_FUNCTION_ENV_VARS]) or all(
+ [env in os.environ for env in _FUNCTION_ENV_VARS]
+ ):
+ # Cloud Functions
+ return _create_functions_resource(project)
+ elif all([env in os.environ for env in _CLOUD_RUN_ENV_VARS]):
+ # Cloud Run
+ return _create_cloud_run_resource(project)
+ elif gce_instance_name is not None:
+ # Compute Engine
+ return _create_compute_resource(project)
+ else:
+ # use generic global resource
+ return _create_global_resource(project)
diff --git a/google/cloud/logging_v2/handlers/app_engine.py b/google/cloud/logging_v2/handlers/app_engine.py
index a5d57c53e..7d16ab07a 100644
--- a/google/cloud/logging_v2/handlers/app_engine.py
+++ b/google/cloud/logging_v2/handlers/app_engine.py
@@ -22,8 +22,10 @@
import os
from google.cloud.logging_v2.handlers._helpers import get_request_data
+from google.cloud.logging_v2.handlers._monitored_resources import (
+ _create_app_engine_resource,
+)
from google.cloud.logging_v2.handlers.transports import BackgroundThreadTransport
-from google.cloud.logging_v2.resource import Resource
_DEFAULT_GAE_LOGGER_NAME = "app"
@@ -75,15 +77,7 @@ def get_gae_resource(self):
Returns:
google.cloud.logging_v2.resource.Resource: Monitored resource for GAE.
"""
- gae_resource = Resource(
- type="gae_app",
- labels={
- "project_id": self.project_id,
- "module_id": self.module_id,
- "version_id": self.version_id,
- },
- )
- return gae_resource
+ return _create_app_engine_resource(self.project_id)
def get_gae_labels(self):
"""Return the labels for GAE app.
diff --git a/google/cloud/logging_v2/handlers/handlers.py b/google/cloud/logging_v2/handlers/handlers.py
index fd99f7adc..feeac9171 100644
--- a/google/cloud/logging_v2/handlers/handlers.py
+++ b/google/cloud/logging_v2/handlers/handlers.py
@@ -17,12 +17,14 @@
import logging
from google.cloud.logging_v2.handlers.transports import BackgroundThreadTransport
-from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE
+from google.cloud.logging_v2.handlers._monitored_resources import detect_resource
DEFAULT_LOGGER_NAME = "python"
EXCLUDED_LOGGER_DEFAULTS = ("google.cloud", "google.auth", "google_auth_httplib2")
+_CLEAR_HANDLER_RESOURCE_TYPES = ("gae_app", "cloud_function")
+
class CloudLoggingHandler(logging.StreamHandler):
"""Handler that directly makes Cloud Logging API calls.
@@ -59,7 +61,7 @@ def __init__(
*,
name=DEFAULT_LOGGER_NAME,
transport=BackgroundThreadTransport,
- resource=_GLOBAL_RESOURCE,
+ resource=None,
labels=None,
stream=None,
):
@@ -78,12 +80,15 @@ def __init__(
:class:`.BackgroundThreadTransport`. The other
option is :class:`.SyncTransport`.
resource (~logging_v2.resource.Resource):
- Resource for this Handler. Defaults to ``GLOBAL_RESOURCE``.
+ Resource for this Handler. If not given, will be inferred from the environment.
labels (Optional[dict]): Monitored resource of the entry, defaults
to the global resource type.
stream (Optional[IO]): Stream to be used by the handler.
"""
super(CloudLoggingHandler, self).__init__(stream)
+ if not resource:
+ # infer the correct monitored resource from the local environment
+ resource = detect_resource(client.project)
self.name = name
self.client = client
self.transport = transport(client, name)
@@ -157,6 +162,11 @@ def setup_logging(
"""
all_excluded_loggers = set(excluded_loggers + EXCLUDED_LOGGER_DEFAULTS)
logger = logging.getLogger()
+
+ # remove built-in handlers on App Engine or Cloud Functions environments
+ if detect_resource().type in _CLEAR_HANDLER_RESOURCE_TYPES:
+ logger.handlers.clear()
+
logger.setLevel(log_level)
logger.addHandler(handler)
for logger_name in all_excluded_loggers:
diff --git a/google/cloud/logging_v2/logger.py b/google/cloud/logging_v2/logger.py
index 6e9c5f00d..124c33934 100644
--- a/google/cloud/logging_v2/logger.py
+++ b/google/cloud/logging_v2/logger.py
@@ -20,6 +20,7 @@
from google.cloud.logging_v2.entries import StructEntry
from google.cloud.logging_v2.entries import TextEntry
from google.cloud.logging_v2.resource import Resource
+from google.cloud.logging_v2.handlers._monitored_resources import detect_resource
_GLOBAL_RESOURCE = Resource(type="global", labels={})
@@ -62,6 +63,7 @@ def __init__(self, name, client, *, labels=None):
self.name = name
self._client = client
self.labels = labels
+ self.default_resource = detect_resource(client.project)
@property
def client(self):
@@ -120,7 +122,7 @@ def _do_log(self, client, _entry_class, payload=None, **kw):
# Apply defaults
kw["log_name"] = kw.pop("log_name", self.full_name)
kw["labels"] = kw.pop("labels", self.labels)
- kw["resource"] = kw.pop("resource", _GLOBAL_RESOURCE)
+ kw["resource"] = kw.pop("resource", self.default_resource)
if payload is not None:
entry = _entry_class(payload=payload, **kw)
diff --git a/google/cloud/logging_v2/services/config_service_v2/async_client.py b/google/cloud/logging_v2/services/config_service_v2/async_client.py
index 9603b3754..a82d99b6e 100644
--- a/google/cloud/logging_v2/services/config_service_v2/async_client.py
+++ b/google/cloud/logging_v2/services/config_service_v2/async_client.py
@@ -90,8 +90,36 @@ class ConfigServiceV2AsyncClient:
ConfigServiceV2Client.parse_common_location_path
)
- from_service_account_info = ConfigServiceV2Client.from_service_account_info
- from_service_account_file = ConfigServiceV2Client.from_service_account_file
+ @classmethod
+ def from_service_account_info(cls, info: dict, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials info.
+
+ Args:
+ info (dict): The service account private key info.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ ConfigServiceV2AsyncClient: The constructed client.
+ """
+ return ConfigServiceV2Client.from_service_account_info.__func__(ConfigServiceV2AsyncClient, info, *args, **kwargs) # type: ignore
+
+ @classmethod
+ def from_service_account_file(cls, filename: str, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ file.
+
+ Args:
+ filename (str): The path to the service account private key json
+ file.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ ConfigServiceV2AsyncClient: The constructed client.
+ """
+ return ConfigServiceV2Client.from_service_account_file.__func__(ConfigServiceV2AsyncClient, filename, *args, **kwargs) # type: ignore
+
from_service_account_json = from_service_account_file
@property
diff --git a/google/cloud/logging_v2/services/config_service_v2/client.py b/google/cloud/logging_v2/services/config_service_v2/client.py
index 7d6492ba0..37a28d7a2 100644
--- a/google/cloud/logging_v2/services/config_service_v2/client.py
+++ b/google/cloud/logging_v2/services/config_service_v2/client.py
@@ -338,21 +338,17 @@ def __init__(
util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))
)
- ssl_credentials = None
+ client_cert_source_func = None
is_mtls = False
if use_client_cert:
if client_options.client_cert_source:
- import grpc # type: ignore
-
- cert, key = client_options.client_cert_source()
- ssl_credentials = grpc.ssl_channel_credentials(
- certificate_chain=cert, private_key=key
- )
is_mtls = True
+ client_cert_source_func = client_options.client_cert_source
else:
- creds = SslCredentials()
- is_mtls = creds.is_mtls
- ssl_credentials = creds.ssl_credentials if is_mtls else None
+ is_mtls = mtls.has_default_client_cert_source()
+ client_cert_source_func = (
+ mtls.default_client_cert_source() if is_mtls else None
+ )
# Figure out which api endpoint to use.
if client_options.api_endpoint is not None:
@@ -395,7 +391,7 @@ def __init__(
credentials_file=client_options.credentials_file,
host=api_endpoint,
scopes=client_options.scopes,
- ssl_channel_credentials=ssl_credentials,
+ client_cert_source_for_mtls=client_cert_source_func,
quota_project_id=client_options.quota_project_id,
client_info=client_info,
)
diff --git a/google/cloud/logging_v2/services/config_service_v2/pagers.py b/google/cloud/logging_v2/services/config_service_v2/pagers.py
index af5c5faf6..f656fef0d 100644
--- a/google/cloud/logging_v2/services/config_service_v2/pagers.py
+++ b/google/cloud/logging_v2/services/config_service_v2/pagers.py
@@ -15,7 +15,16 @@
# limitations under the License.
#
-from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple
+from typing import (
+ Any,
+ AsyncIterable,
+ Awaitable,
+ Callable,
+ Iterable,
+ Sequence,
+ Tuple,
+ Optional,
+)
from google.cloud.logging_v2.types import logging_config
diff --git a/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py
index a64405fba..b749eb5d3 100644
--- a/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py
+++ b/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py
@@ -58,6 +58,7 @@ def __init__(
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
ssl_channel_credentials: grpc.ChannelCredentials = None,
+ client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
@@ -88,6 +89,10 @@ def __init__(
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for grpc channel. It is ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ A callback to provide client certificate bytes and private key bytes,
+ both in PEM format. It is used to configure mutual TLS channel. It is
+ ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
@@ -104,6 +109,11 @@ def __init__(
"""
self._ssl_channel_credentials = ssl_channel_credentials
+ if api_mtls_endpoint:
+ warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+ if client_cert_source:
+ warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
if channel:
# Sanity check: Ensure that channel and credentials are not both
# provided.
@@ -113,11 +123,6 @@ def __init__(
self._grpc_channel = channel
self._ssl_channel_credentials = None
elif api_mtls_endpoint:
- warnings.warn(
- "api_mtls_endpoint and client_cert_source are deprecated",
- DeprecationWarning,
- )
-
host = (
api_mtls_endpoint
if ":" in api_mtls_endpoint
@@ -161,12 +166,18 @@ def __init__(
scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
)
+ if client_cert_source_for_mtls and not ssl_channel_credentials:
+ cert, key = client_cert_source_for_mtls()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+
# create a new channel. The provided one is ignored.
self._grpc_channel = type(self).create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
- ssl_credentials=ssl_channel_credentials,
+ ssl_credentials=self._ssl_channel_credentials,
scopes=scopes or self.AUTH_SCOPES,
quota_project_id=quota_project_id,
options=[
diff --git a/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py
index aa094ea0e..f0f1ca070 100644
--- a/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py
+++ b/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py
@@ -102,6 +102,7 @@ def __init__(
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
ssl_channel_credentials: grpc.ChannelCredentials = None,
+ client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
quota_project_id=None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
@@ -133,6 +134,10 @@ def __init__(
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for grpc channel. It is ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ A callback to provide client certificate bytes and private key bytes,
+ both in PEM format. It is used to configure mutual TLS channel. It is
+ ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
@@ -149,6 +154,11 @@ def __init__(
"""
self._ssl_channel_credentials = ssl_channel_credentials
+ if api_mtls_endpoint:
+ warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+ if client_cert_source:
+ warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
if channel:
# Sanity check: Ensure that channel and credentials are not both
# provided.
@@ -158,11 +168,6 @@ def __init__(
self._grpc_channel = channel
self._ssl_channel_credentials = None
elif api_mtls_endpoint:
- warnings.warn(
- "api_mtls_endpoint and client_cert_source are deprecated",
- DeprecationWarning,
- )
-
host = (
api_mtls_endpoint
if ":" in api_mtls_endpoint
@@ -206,12 +211,18 @@ def __init__(
scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
)
+ if client_cert_source_for_mtls and not ssl_channel_credentials:
+ cert, key = client_cert_source_for_mtls()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+
# create a new channel. The provided one is ignored.
self._grpc_channel = type(self).create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
- ssl_credentials=ssl_channel_credentials,
+ ssl_credentials=self._ssl_channel_credentials,
scopes=scopes or self.AUTH_SCOPES,
quota_project_id=quota_project_id,
options=[
diff --git a/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/google/cloud/logging_v2/services/logging_service_v2/async_client.py
index 0c1ae3fae..5afd77be5 100644
--- a/google/cloud/logging_v2/services/logging_service_v2/async_client.py
+++ b/google/cloud/logging_v2/services/logging_service_v2/async_client.py
@@ -87,8 +87,36 @@ class LoggingServiceV2AsyncClient:
LoggingServiceV2Client.parse_common_location_path
)
- from_service_account_info = LoggingServiceV2Client.from_service_account_info
- from_service_account_file = LoggingServiceV2Client.from_service_account_file
+ @classmethod
+ def from_service_account_info(cls, info: dict, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials info.
+
+ Args:
+ info (dict): The service account private key info.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ LoggingServiceV2AsyncClient: The constructed client.
+ """
+ return LoggingServiceV2Client.from_service_account_info.__func__(LoggingServiceV2AsyncClient, info, *args, **kwargs) # type: ignore
+
+ @classmethod
+ def from_service_account_file(cls, filename: str, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ file.
+
+ Args:
+ filename (str): The path to the service account private key json
+ file.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ LoggingServiceV2AsyncClient: The constructed client.
+ """
+ return LoggingServiceV2Client.from_service_account_file.__func__(LoggingServiceV2AsyncClient, filename, *args, **kwargs) # type: ignore
+
from_service_account_json = from_service_account_file
@property
diff --git a/google/cloud/logging_v2/services/logging_service_v2/client.py b/google/cloud/logging_v2/services/logging_service_v2/client.py
index a340eb205..00d758ab5 100644
--- a/google/cloud/logging_v2/services/logging_service_v2/client.py
+++ b/google/cloud/logging_v2/services/logging_service_v2/client.py
@@ -292,21 +292,17 @@ def __init__(
util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))
)
- ssl_credentials = None
+ client_cert_source_func = None
is_mtls = False
if use_client_cert:
if client_options.client_cert_source:
- import grpc # type: ignore
-
- cert, key = client_options.client_cert_source()
- ssl_credentials = grpc.ssl_channel_credentials(
- certificate_chain=cert, private_key=key
- )
is_mtls = True
+ client_cert_source_func = client_options.client_cert_source
else:
- creds = SslCredentials()
- is_mtls = creds.is_mtls
- ssl_credentials = creds.ssl_credentials if is_mtls else None
+ is_mtls = mtls.has_default_client_cert_source()
+ client_cert_source_func = (
+ mtls.default_client_cert_source() if is_mtls else None
+ )
# Figure out which api endpoint to use.
if client_options.api_endpoint is not None:
@@ -349,7 +345,7 @@ def __init__(
credentials_file=client_options.credentials_file,
host=api_endpoint,
scopes=client_options.scopes,
- ssl_channel_credentials=ssl_credentials,
+ client_cert_source_for_mtls=client_cert_source_func,
quota_project_id=client_options.quota_project_id,
client_info=client_info,
)
@@ -582,12 +578,10 @@ def write_log_entries(
request.log_name = log_name
if resource is not None:
request.resource = resource
-
- if labels:
- request.labels.update(labels)
-
- if entries:
- request.entries.extend(entries)
+ if labels is not None:
+ request.labels = labels
+ if entries is not None:
+ request.entries = entries
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
@@ -703,14 +697,13 @@ def list_log_entries(
# If we have keyword arguments corresponding to fields on the
# request, apply these.
+ if resource_names is not None:
+ request.resource_names = resource_names
if filter is not None:
request.filter = filter
if order_by is not None:
request.order_by = order_by
- if resource_names:
- request.resource_names.extend(resource_names)
-
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.list_log_entries]
diff --git a/google/cloud/logging_v2/services/logging_service_v2/pagers.py b/google/cloud/logging_v2/services/logging_service_v2/pagers.py
index 5492a3a30..7ab8ac8d2 100644
--- a/google/cloud/logging_v2/services/logging_service_v2/pagers.py
+++ b/google/cloud/logging_v2/services/logging_service_v2/pagers.py
@@ -15,7 +15,16 @@
# limitations under the License.
#
-from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple
+from typing import (
+ Any,
+ AsyncIterable,
+ Awaitable,
+ Callable,
+ Iterable,
+ Sequence,
+ Tuple,
+ Optional,
+)
from google.api import monitored_resource_pb2 as monitored_resource # type: ignore
from google.cloud.logging_v2.types import log_entry
diff --git a/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py
index f8007bb0d..e90b2a5fe 100644
--- a/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py
+++ b/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py
@@ -58,6 +58,7 @@ def __init__(
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
ssl_channel_credentials: grpc.ChannelCredentials = None,
+ client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
@@ -88,6 +89,10 @@ def __init__(
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for grpc channel. It is ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ A callback to provide client certificate bytes and private key bytes,
+ both in PEM format. It is used to configure mutual TLS channel. It is
+ ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
@@ -104,6 +109,11 @@ def __init__(
"""
self._ssl_channel_credentials = ssl_channel_credentials
+ if api_mtls_endpoint:
+ warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+ if client_cert_source:
+ warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
if channel:
# Sanity check: Ensure that channel and credentials are not both
# provided.
@@ -113,11 +123,6 @@ def __init__(
self._grpc_channel = channel
self._ssl_channel_credentials = None
elif api_mtls_endpoint:
- warnings.warn(
- "api_mtls_endpoint and client_cert_source are deprecated",
- DeprecationWarning,
- )
-
host = (
api_mtls_endpoint
if ":" in api_mtls_endpoint
@@ -161,12 +166,18 @@ def __init__(
scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
)
+ if client_cert_source_for_mtls and not ssl_channel_credentials:
+ cert, key = client_cert_source_for_mtls()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+
# create a new channel. The provided one is ignored.
self._grpc_channel = type(self).create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
- ssl_credentials=ssl_channel_credentials,
+ ssl_credentials=self._ssl_channel_credentials,
scopes=scopes or self.AUTH_SCOPES,
quota_project_id=quota_project_id,
options=[
diff --git a/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py
index 6adea9ca5..40037da25 100644
--- a/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py
+++ b/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py
@@ -102,6 +102,7 @@ def __init__(
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
ssl_channel_credentials: grpc.ChannelCredentials = None,
+ client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
quota_project_id=None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
@@ -133,6 +134,10 @@ def __init__(
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for grpc channel. It is ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ A callback to provide client certificate bytes and private key bytes,
+ both in PEM format. It is used to configure mutual TLS channel. It is
+ ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
@@ -149,6 +154,11 @@ def __init__(
"""
self._ssl_channel_credentials = ssl_channel_credentials
+ if api_mtls_endpoint:
+ warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+ if client_cert_source:
+ warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
if channel:
# Sanity check: Ensure that channel and credentials are not both
# provided.
@@ -158,11 +168,6 @@ def __init__(
self._grpc_channel = channel
self._ssl_channel_credentials = None
elif api_mtls_endpoint:
- warnings.warn(
- "api_mtls_endpoint and client_cert_source are deprecated",
- DeprecationWarning,
- )
-
host = (
api_mtls_endpoint
if ":" in api_mtls_endpoint
@@ -206,12 +211,18 @@ def __init__(
scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
)
+ if client_cert_source_for_mtls and not ssl_channel_credentials:
+ cert, key = client_cert_source_for_mtls()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+
# create a new channel. The provided one is ignored.
self._grpc_channel = type(self).create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
- ssl_credentials=ssl_channel_credentials,
+ ssl_credentials=self._ssl_channel_credentials,
scopes=scopes or self.AUTH_SCOPES,
quota_project_id=quota_project_id,
options=[
diff --git a/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/google/cloud/logging_v2/services/metrics_service_v2/async_client.py
index 2c592e685..09b2c3173 100644
--- a/google/cloud/logging_v2/services/metrics_service_v2/async_client.py
+++ b/google/cloud/logging_v2/services/metrics_service_v2/async_client.py
@@ -80,8 +80,36 @@ class MetricsServiceV2AsyncClient:
MetricsServiceV2Client.parse_common_location_path
)
- from_service_account_info = MetricsServiceV2Client.from_service_account_info
- from_service_account_file = MetricsServiceV2Client.from_service_account_file
+ @classmethod
+ def from_service_account_info(cls, info: dict, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials info.
+
+ Args:
+ info (dict): The service account private key info.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ MetricsServiceV2AsyncClient: The constructed client.
+ """
+ return MetricsServiceV2Client.from_service_account_info.__func__(MetricsServiceV2AsyncClient, info, *args, **kwargs) # type: ignore
+
+ @classmethod
+ def from_service_account_file(cls, filename: str, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ file.
+
+ Args:
+ filename (str): The path to the service account private key json
+ file.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ MetricsServiceV2AsyncClient: The constructed client.
+ """
+ return MetricsServiceV2Client.from_service_account_file.__func__(MetricsServiceV2AsyncClient, filename, *args, **kwargs) # type: ignore
+
from_service_account_json = from_service_account_file
@property
diff --git a/google/cloud/logging_v2/services/metrics_service_v2/client.py b/google/cloud/logging_v2/services/metrics_service_v2/client.py
index cc6e491fc..850236a57 100644
--- a/google/cloud/logging_v2/services/metrics_service_v2/client.py
+++ b/google/cloud/logging_v2/services/metrics_service_v2/client.py
@@ -286,21 +286,17 @@ def __init__(
util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))
)
- ssl_credentials = None
+ client_cert_source_func = None
is_mtls = False
if use_client_cert:
if client_options.client_cert_source:
- import grpc # type: ignore
-
- cert, key = client_options.client_cert_source()
- ssl_credentials = grpc.ssl_channel_credentials(
- certificate_chain=cert, private_key=key
- )
is_mtls = True
+ client_cert_source_func = client_options.client_cert_source
else:
- creds = SslCredentials()
- is_mtls = creds.is_mtls
- ssl_credentials = creds.ssl_credentials if is_mtls else None
+ is_mtls = mtls.has_default_client_cert_source()
+ client_cert_source_func = (
+ mtls.default_client_cert_source() if is_mtls else None
+ )
# Figure out which api endpoint to use.
if client_options.api_endpoint is not None:
@@ -343,7 +339,7 @@ def __init__(
credentials_file=client_options.credentials_file,
host=api_endpoint,
scopes=client_options.scopes,
- ssl_channel_credentials=ssl_credentials,
+ client_cert_source_for_mtls=client_cert_source_func,
quota_project_id=client_options.quota_project_id,
client_info=client_info,
)
diff --git a/google/cloud/logging_v2/services/metrics_service_v2/pagers.py b/google/cloud/logging_v2/services/metrics_service_v2/pagers.py
index 51c398598..15134ac57 100644
--- a/google/cloud/logging_v2/services/metrics_service_v2/pagers.py
+++ b/google/cloud/logging_v2/services/metrics_service_v2/pagers.py
@@ -15,7 +15,16 @@
# limitations under the License.
#
-from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple
+from typing import (
+ Any,
+ AsyncIterable,
+ Awaitable,
+ Callable,
+ Iterable,
+ Sequence,
+ Tuple,
+ Optional,
+)
from google.cloud.logging_v2.types import logging_metrics
diff --git a/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py
index 1cb9262ab..e55bf32e5 100644
--- a/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py
+++ b/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py
@@ -58,6 +58,7 @@ def __init__(
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
ssl_channel_credentials: grpc.ChannelCredentials = None,
+ client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
@@ -88,6 +89,10 @@ def __init__(
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for grpc channel. It is ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ A callback to provide client certificate bytes and private key bytes,
+ both in PEM format. It is used to configure mutual TLS channel. It is
+ ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
@@ -104,6 +109,11 @@ def __init__(
"""
self._ssl_channel_credentials = ssl_channel_credentials
+ if api_mtls_endpoint:
+ warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+ if client_cert_source:
+ warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
if channel:
# Sanity check: Ensure that channel and credentials are not both
# provided.
@@ -113,11 +123,6 @@ def __init__(
self._grpc_channel = channel
self._ssl_channel_credentials = None
elif api_mtls_endpoint:
- warnings.warn(
- "api_mtls_endpoint and client_cert_source are deprecated",
- DeprecationWarning,
- )
-
host = (
api_mtls_endpoint
if ":" in api_mtls_endpoint
@@ -161,12 +166,18 @@ def __init__(
scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
)
+ if client_cert_source_for_mtls and not ssl_channel_credentials:
+ cert, key = client_cert_source_for_mtls()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+
# create a new channel. The provided one is ignored.
self._grpc_channel = type(self).create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
- ssl_credentials=ssl_channel_credentials,
+ ssl_credentials=self._ssl_channel_credentials,
scopes=scopes or self.AUTH_SCOPES,
quota_project_id=quota_project_id,
options=[
diff --git a/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py b/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py
index ddbd16da6..ec93d3850 100644
--- a/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py
+++ b/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py
@@ -102,6 +102,7 @@ def __init__(
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
ssl_channel_credentials: grpc.ChannelCredentials = None,
+ client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
quota_project_id=None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
@@ -133,6 +134,10 @@ def __init__(
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for grpc channel. It is ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ A callback to provide client certificate bytes and private key bytes,
+ both in PEM format. It is used to configure mutual TLS channel. It is
+ ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
@@ -149,6 +154,11 @@ def __init__(
"""
self._ssl_channel_credentials = ssl_channel_credentials
+ if api_mtls_endpoint:
+ warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+ if client_cert_source:
+ warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
if channel:
# Sanity check: Ensure that channel and credentials are not both
# provided.
@@ -158,11 +168,6 @@ def __init__(
self._grpc_channel = channel
self._ssl_channel_credentials = None
elif api_mtls_endpoint:
- warnings.warn(
- "api_mtls_endpoint and client_cert_source are deprecated",
- DeprecationWarning,
- )
-
host = (
api_mtls_endpoint
if ":" in api_mtls_endpoint
@@ -206,12 +211,18 @@ def __init__(
scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
)
+ if client_cert_source_for_mtls and not ssl_channel_credentials:
+ cert, key = client_cert_source_for_mtls()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+
# create a new channel. The provided one is ignored.
self._grpc_channel = type(self).create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
- ssl_credentials=ssl_channel_credentials,
+ ssl_credentials=self._ssl_channel_credentials,
scopes=scopes or self.AUTH_SCOPES,
quota_project_id=quota_project_id,
options=[
diff --git a/google/cloud/logging_v2/types/__init__.py b/google/cloud/logging_v2/types/__init__.py
index dce385af3..9519c0777 100644
--- a/google/cloud/logging_v2/types/__init__.py
+++ b/google/cloud/logging_v2/types/__init__.py
@@ -20,121 +20,121 @@
LogEntryOperation,
LogEntrySourceLocation,
)
+from .logging import (
+ DeleteLogRequest,
+ ListLogEntriesRequest,
+ ListLogEntriesResponse,
+ ListLogsRequest,
+ ListLogsResponse,
+ ListMonitoredResourceDescriptorsRequest,
+ ListMonitoredResourceDescriptorsResponse,
+ TailLogEntriesRequest,
+ TailLogEntriesResponse,
+ WriteLogEntriesPartialErrors,
+ WriteLogEntriesRequest,
+ WriteLogEntriesResponse,
+)
from .logging_config import (
- LogBucket,
- LogView,
- LogSink,
BigQueryOptions,
- ListBucketsRequest,
- ListBucketsResponse,
+ CmekSettings,
CreateBucketRequest,
- UpdateBucketRequest,
- GetBucketRequest,
- DeleteBucketRequest,
- UndeleteBucketRequest,
- ListViewsRequest,
- ListViewsResponse,
+ CreateExclusionRequest,
+ CreateSinkRequest,
CreateViewRequest,
- UpdateViewRequest,
- GetViewRequest,
+ DeleteBucketRequest,
+ DeleteExclusionRequest,
+ DeleteSinkRequest,
DeleteViewRequest,
- ListSinksRequest,
- ListSinksResponse,
+ GetBucketRequest,
+ GetCmekSettingsRequest,
+ GetExclusionRequest,
GetSinkRequest,
- CreateSinkRequest,
- UpdateSinkRequest,
- DeleteSinkRequest,
- LogExclusion,
+ GetViewRequest,
+ ListBucketsRequest,
+ ListBucketsResponse,
ListExclusionsRequest,
ListExclusionsResponse,
- GetExclusionRequest,
- CreateExclusionRequest,
- UpdateExclusionRequest,
- DeleteExclusionRequest,
- GetCmekSettingsRequest,
+ ListSinksRequest,
+ ListSinksResponse,
+ ListViewsRequest,
+ ListViewsResponse,
+ LogBucket,
+ LogExclusion,
+ LogSink,
+ LogView,
+ UndeleteBucketRequest,
+ UpdateBucketRequest,
UpdateCmekSettingsRequest,
- CmekSettings,
+ UpdateExclusionRequest,
+ UpdateSinkRequest,
+ UpdateViewRequest,
LifecycleState,
)
-from .logging import (
- DeleteLogRequest,
- WriteLogEntriesRequest,
- WriteLogEntriesResponse,
- WriteLogEntriesPartialErrors,
- ListLogEntriesRequest,
- ListLogEntriesResponse,
- ListMonitoredResourceDescriptorsRequest,
- ListMonitoredResourceDescriptorsResponse,
- ListLogsRequest,
- ListLogsResponse,
- TailLogEntriesRequest,
- TailLogEntriesResponse,
-)
from .logging_metrics import (
- LogMetric,
+ CreateLogMetricRequest,
+ DeleteLogMetricRequest,
+ GetLogMetricRequest,
ListLogMetricsRequest,
ListLogMetricsResponse,
- GetLogMetricRequest,
- CreateLogMetricRequest,
+ LogMetric,
UpdateLogMetricRequest,
- DeleteLogMetricRequest,
)
__all__ = (
"LogEntry",
"LogEntryOperation",
"LogEntrySourceLocation",
- "LogBucket",
- "LogView",
- "LogSink",
+ "DeleteLogRequest",
+ "ListLogEntriesRequest",
+ "ListLogEntriesResponse",
+ "ListLogsRequest",
+ "ListLogsResponse",
+ "ListMonitoredResourceDescriptorsRequest",
+ "ListMonitoredResourceDescriptorsResponse",
+ "TailLogEntriesRequest",
+ "TailLogEntriesResponse",
+ "WriteLogEntriesPartialErrors",
+ "WriteLogEntriesRequest",
+ "WriteLogEntriesResponse",
"BigQueryOptions",
- "ListBucketsRequest",
- "ListBucketsResponse",
+ "CmekSettings",
"CreateBucketRequest",
- "UpdateBucketRequest",
- "GetBucketRequest",
- "DeleteBucketRequest",
- "UndeleteBucketRequest",
- "ListViewsRequest",
- "ListViewsResponse",
+ "CreateExclusionRequest",
+ "CreateSinkRequest",
"CreateViewRequest",
- "UpdateViewRequest",
- "GetViewRequest",
+ "DeleteBucketRequest",
+ "DeleteExclusionRequest",
+ "DeleteSinkRequest",
"DeleteViewRequest",
- "ListSinksRequest",
- "ListSinksResponse",
+ "GetBucketRequest",
+ "GetCmekSettingsRequest",
+ "GetExclusionRequest",
"GetSinkRequest",
- "CreateSinkRequest",
- "UpdateSinkRequest",
- "DeleteSinkRequest",
- "LogExclusion",
+ "GetViewRequest",
+ "ListBucketsRequest",
+ "ListBucketsResponse",
"ListExclusionsRequest",
"ListExclusionsResponse",
- "GetExclusionRequest",
- "CreateExclusionRequest",
- "UpdateExclusionRequest",
- "DeleteExclusionRequest",
- "GetCmekSettingsRequest",
+ "ListSinksRequest",
+ "ListSinksResponse",
+ "ListViewsRequest",
+ "ListViewsResponse",
+ "LogBucket",
+ "LogExclusion",
+ "LogSink",
+ "LogView",
+ "UndeleteBucketRequest",
+ "UpdateBucketRequest",
"UpdateCmekSettingsRequest",
- "CmekSettings",
+ "UpdateExclusionRequest",
+ "UpdateSinkRequest",
+ "UpdateViewRequest",
"LifecycleState",
- "DeleteLogRequest",
- "WriteLogEntriesRequest",
- "WriteLogEntriesResponse",
- "WriteLogEntriesPartialErrors",
- "ListLogEntriesRequest",
- "ListLogEntriesResponse",
- "ListMonitoredResourceDescriptorsRequest",
- "ListMonitoredResourceDescriptorsResponse",
- "ListLogsRequest",
- "ListLogsResponse",
- "TailLogEntriesRequest",
- "TailLogEntriesResponse",
- "LogMetric",
+ "CreateLogMetricRequest",
+ "DeleteLogMetricRequest",
+ "GetLogMetricRequest",
"ListLogMetricsRequest",
"ListLogMetricsResponse",
- "GetLogMetricRequest",
- "CreateLogMetricRequest",
+ "LogMetric",
"UpdateLogMetricRequest",
- "DeleteLogMetricRequest",
)
diff --git a/logging-v2-py.tar.gz b/logging-v2-py.tar.gz
deleted file mode 100644
index e69de29bb..000000000
diff --git a/noxfile.py b/noxfile.py
index e68736640..1183ca5fb 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -84,12 +84,14 @@ def default(session):
session.install("asyncmock", "pytest-asyncio")
session.install("mock", "pytest", "pytest-cov", "flask", "webob", "django")
+
session.install("-e", ".")
# Run py.test against the unit tests.
session.run(
"py.test",
"--quiet",
+ f"--junitxml=unit_{session.python}_sponge_log.xml",
"--cov=google/cloud",
"--cov=tests/unit",
"--cov-append",
@@ -119,6 +121,9 @@ def system(session):
# Sanity check: Only run tests if the environment variable is set.
if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""):
session.skip("Credentials must be set via environment variable")
+ # Install pyopenssl for mTLS testing.
+ if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true":
+ session.install("pyopenssl")
system_test_exists = os.path.exists(system_test_path)
system_test_folder_exists = os.path.exists(system_test_folder_path)
@@ -144,9 +149,21 @@ def system(session):
# Run py.test against the system tests.
if system_test_exists:
- session.run("py.test", "--quiet", system_test_path, *session.posargs)
+ session.run(
+ "py.test",
+ "--quiet",
+ f"--junitxml=system_{session.python}_sponge_log.xml",
+ system_test_path,
+ *session.posargs,
+ )
if system_test_folder_exists:
- session.run("py.test", "--quiet", system_test_folder_path, *session.posargs)
+ session.run(
+ "py.test",
+ "--quiet",
+ f"--junitxml=system_{session.python}_sponge_log.xml",
+ system_test_folder_path,
+ *session.posargs,
+ )
@nox.session(python=DEFAULT_PYTHON_VERSION)
diff --git a/renovate.json b/renovate.json
index 4fa949311..2b581d170 100644
--- a/renovate.json
+++ b/renovate.json
@@ -1,5 +1,8 @@
{
"extends": [
"config:base", ":preserveSemverRanges"
- ]
+ ],
+ "git-submodules": {
+ "enabled": true
+ }
}
diff --git a/samples/snippets/handler.py b/samples/snippets/handler.py
index 9a63d022f..0a708c138 100644
--- a/samples/snippets/handler.py
+++ b/samples/snippets/handler.py
@@ -16,6 +16,7 @@
def use_logging_handler():
+ # [START logging_stdlogging]
# [START logging_handler_setup]
# Imports the Cloud Logging client library
import google.cloud.logging
@@ -43,6 +44,7 @@ def use_logging_handler():
# [END logging_handler_usage]
print("Logged: {}".format(text))
+ # [END logging_stdlogging]
if __name__ == "__main__":
diff --git a/samples/snippets/noxfile.py b/samples/snippets/noxfile.py
index bca0522ec..97bf7da80 100644
--- a/samples/snippets/noxfile.py
+++ b/samples/snippets/noxfile.py
@@ -85,7 +85,7 @@ def get_pytest_env_vars() -> Dict[str, str]:
# DO NOT EDIT - automatically generated.
# All versions used to tested samples.
-ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8"]
+ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8", "3.9"]
# Any default versions that should be ignored.
IGNORED_VERSIONS = TEST_CONFIG['ignored_versions']
diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt
index db786f889..cc757aabc 100644
--- a/samples/snippets/requirements.txt
+++ b/samples/snippets/requirements.txt
@@ -1,4 +1,4 @@
-google-cloud-logging==2.1.1
-google-cloud-storage==1.35.0
-google-cloud-pubsub==2.2.0
-google-cloud-bigquery==2.6.2
+google-cloud-logging==2.2.0
+google-cloud-bigquery==2.11.0
+google-cloud-storage==1.36.2
+google-cloud-pubsub==2.4.0
diff --git a/setup.py b/setup.py
index 8ede9877e..8885090fe 100644
--- a/setup.py
+++ b/setup.py
@@ -22,7 +22,7 @@
name = "google-cloud-logging"
description = "Stackdriver Logging API client library"
-version = "2.2.0"
+version = "2.3.0"
# Should be one of:
# 'Development Status :: 3 - Alpha'
# 'Development Status :: 4 - Beta'
diff --git a/synth.metadata b/synth.metadata
index 28c8b61cc..5656e5bc6 100644
--- a/synth.metadata
+++ b/synth.metadata
@@ -4,29 +4,29 @@
"git": {
"name": ".",
"remote": "https://github.com/googleapis/python-logging.git",
- "sha": "c89dea4899a2fd7175c2849f158fb921fc017a15"
+ "sha": "42bda27dd3eee2123cbe5b21dd75a587af7a910b"
}
},
{
"git": {
"name": "googleapis",
"remote": "https://github.com/googleapis/googleapis.git",
- "sha": "520682435235d9c503983a360a2090025aa47cd1",
- "internalRef": "350246057"
+ "sha": "28a591963253d52ce3a25a918cafbdd9928de8cf",
+ "internalRef": "361662015"
}
},
{
"git": {
"name": "synthtool",
"remote": "https://github.com/googleapis/synthtool.git",
- "sha": "16ec872dd898d7de6e1822badfac32484b5d9031"
+ "sha": "0780323da96d5a53925fe0547757181fe76e8f1e"
}
},
{
"git": {
"name": "synthtool",
"remote": "https://github.com/googleapis/synthtool.git",
- "sha": "16ec872dd898d7de6e1822badfac32484b5d9031"
+ "sha": "0780323da96d5a53925fe0547757181fe76e8f1e"
}
}
],
@@ -49,6 +49,7 @@
".github/ISSUE_TEMPLATE/feature_request.md",
".github/ISSUE_TEMPLATE/support_request.md",
".github/PULL_REQUEST_TEMPLATE.md",
+ ".github/header-checker-lint.yml",
".github/release-please.yml",
".github/snippet-bot.yml",
".gitignore",
@@ -131,7 +132,6 @@
"google/cloud/logging_v2/types/logging.py",
"google/cloud/logging_v2/types/logging_config.py",
"google/cloud/logging_v2/types/logging_metrics.py",
- "logging-v2-py.tar.gz",
"mypy.ini",
"noxfile.py",
"renovate.json",
diff --git a/synth.py b/synth.py
index 5be817361..7f7008a39 100644
--- a/synth.py
+++ b/synth.py
@@ -68,6 +68,25 @@
)
s.move(templated_files, excludes=[".coveragerc", "docs/multiprocessing.rst"])
+# adjust .trampolinerc for environment tests
+s.replace(
+ ".trampolinerc",
+ "required_envvars[^\)]*\)",
+ "required_envvars+=()"
+)
+s.replace(
+ ".trampolinerc",
+ "pass_down_envvars\+\=\(",
+ 'pass_down_envvars+=(\n "ENVIRONMENT"\n "RUNTIME"'
+)
+
+# don't lint environment tests
+s.replace(
+ ".flake8",
+ "exclude =",
+ 'exclude =\n # Exclude environment test code.\n tests/environment/**\n'
+)
+
# --------------------------------------------------------------------------
# Samples templates
# --------------------------------------------------------------------------
@@ -80,4 +99,4 @@
python.py_samples()
-s.shell.run(["nox", "-s", "blacken"], hide_output=False)
\ No newline at end of file
+s.shell.run(["nox", "-s", "blacken"], hide_output=False)
diff --git a/tests/environment b/tests/environment
new file mode 160000
index 000000000..eb60e8239
--- /dev/null
+++ b/tests/environment
@@ -0,0 +1 @@
+Subproject commit eb60e823924dabaaea62e2ec0b8243eb868c1826
diff --git a/tests/system/test_system.py b/tests/system/test_system.py
index 9cd0ac253..e6f5aa7cf 100644
--- a/tests/system/test_system.py
+++ b/tests/system/test_system.py
@@ -72,10 +72,10 @@ def _list_entries(logger):
:rtype: list
:returns: List of all entries consumed.
"""
- inner = RetryResult(_has_entries, delay=1, backoff=2, max_tries=6)(_consume_entries)
+ inner = RetryResult(_has_entries, delay=2, backoff=2, max_tries=6)(_consume_entries)
outer = RetryErrors(
(ServiceUnavailable, ResourceExhausted, InternalServerError),
- delay=1,
+ delay=2,
backoff=2,
max_tries=6,
)(inner)
diff --git a/tests/unit/gapic/logging_v2/__init__.py b/tests/unit/gapic/logging_v2/__init__.py
index 8b1378917..42ffdf2bc 100644
--- a/tests/unit/gapic/logging_v2/__init__.py
+++ b/tests/unit/gapic/logging_v2/__init__.py
@@ -1 +1,16 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
diff --git a/tests/unit/gapic/logging_v2/test_config_service_v2.py b/tests/unit/gapic/logging_v2/test_config_service_v2.py
index a2685b497..d6a2f3983 100644
--- a/tests/unit/gapic/logging_v2/test_config_service_v2.py
+++ b/tests/unit/gapic/logging_v2/test_config_service_v2.py
@@ -88,15 +88,19 @@ def test__get_default_mtls_endpoint():
)
-def test_config_service_v2_client_from_service_account_info():
+@pytest.mark.parametrize(
+ "client_class", [ConfigServiceV2Client, ConfigServiceV2AsyncClient,]
+)
+def test_config_service_v2_client_from_service_account_info(client_class):
creds = credentials.AnonymousCredentials()
with mock.patch.object(
service_account.Credentials, "from_service_account_info"
) as factory:
factory.return_value = creds
info = {"valid": True}
- client = ConfigServiceV2Client.from_service_account_info(info)
+ client = client_class.from_service_account_info(info)
assert client.transport._credentials == creds
+ assert isinstance(client, client_class)
assert client.transport._host == "logging.googleapis.com:443"
@@ -112,9 +116,11 @@ def test_config_service_v2_client_from_service_account_file(client_class):
factory.return_value = creds
client = client_class.from_service_account_file("dummy/file/path.json")
assert client.transport._credentials == creds
+ assert isinstance(client, client_class)
client = client_class.from_service_account_json("dummy/file/path.json")
assert client.transport._credentials == creds
+ assert isinstance(client, client_class)
assert client.transport._host == "logging.googleapis.com:443"
@@ -175,7 +181,7 @@ def test_config_service_v2_client_client_options(
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -191,7 +197,7 @@ def test_config_service_v2_client_client_options(
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -207,7 +213,7 @@ def test_config_service_v2_client_client_options(
credentials_file=None,
host=client.DEFAULT_MTLS_ENDPOINT,
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -235,7 +241,7 @@ def test_config_service_v2_client_client_options(
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id="octopus",
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -296,29 +302,25 @@ def test_config_service_v2_client_mtls_env_auto(
client_cert_source=client_cert_source_callback
)
with mock.patch.object(transport_class, "__init__") as patched:
- ssl_channel_creds = mock.Mock()
- with mock.patch(
- "grpc.ssl_channel_credentials", return_value=ssl_channel_creds
- ):
- patched.return_value = None
- client = client_class(client_options=options)
+ patched.return_value = None
+ client = client_class(client_options=options)
- if use_client_cert_env == "false":
- expected_ssl_channel_creds = None
- expected_host = client.DEFAULT_ENDPOINT
- else:
- expected_ssl_channel_creds = ssl_channel_creds
- expected_host = client.DEFAULT_MTLS_ENDPOINT
+ if use_client_cert_env == "false":
+ expected_client_cert_source = None
+ expected_host = client.DEFAULT_ENDPOINT
+ else:
+ expected_client_cert_source = client_cert_source_callback
+ expected_host = client.DEFAULT_MTLS_ENDPOINT
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host=expected_host,
- scopes=None,
- ssl_channel_credentials=expected_ssl_channel_creds,
- quota_project_id=None,
- client_info=transports.base.DEFAULT_CLIENT_INFO,
- )
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=expected_host,
+ scopes=None,
+ client_cert_source_for_mtls=expected_client_cert_source,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
# Check the case ADC client cert is provided. Whether client cert is used depends on
# GOOGLE_API_USE_CLIENT_CERTIFICATE value.
@@ -327,66 +329,53 @@ def test_config_service_v2_client_mtls_env_auto(
):
with mock.patch.object(transport_class, "__init__") as patched:
with mock.patch(
- "google.auth.transport.grpc.SslCredentials.__init__", return_value=None
+ "google.auth.transport.mtls.has_default_client_cert_source",
+ return_value=True,
):
with mock.patch(
- "google.auth.transport.grpc.SslCredentials.is_mtls",
- new_callable=mock.PropertyMock,
- ) as is_mtls_mock:
- with mock.patch(
- "google.auth.transport.grpc.SslCredentials.ssl_credentials",
- new_callable=mock.PropertyMock,
- ) as ssl_credentials_mock:
- if use_client_cert_env == "false":
- is_mtls_mock.return_value = False
- ssl_credentials_mock.return_value = None
- expected_host = client.DEFAULT_ENDPOINT
- expected_ssl_channel_creds = None
- else:
- is_mtls_mock.return_value = True
- ssl_credentials_mock.return_value = mock.Mock()
- expected_host = client.DEFAULT_MTLS_ENDPOINT
- expected_ssl_channel_creds = (
- ssl_credentials_mock.return_value
- )
-
- patched.return_value = None
- client = client_class()
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host=expected_host,
- scopes=None,
- ssl_channel_credentials=expected_ssl_channel_creds,
- quota_project_id=None,
- client_info=transports.base.DEFAULT_CLIENT_INFO,
- )
+ "google.auth.transport.mtls.default_client_cert_source",
+ return_value=client_cert_source_callback,
+ ):
+ if use_client_cert_env == "false":
+ expected_host = client.DEFAULT_ENDPOINT
+ expected_client_cert_source = None
+ else:
+ expected_host = client.DEFAULT_MTLS_ENDPOINT
+ expected_client_cert_source = client_cert_source_callback
- # Check the case client_cert_source and ADC client cert are not provided.
- with mock.patch.dict(
- os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
- ):
- with mock.patch.object(transport_class, "__init__") as patched:
- with mock.patch(
- "google.auth.transport.grpc.SslCredentials.__init__", return_value=None
- ):
- with mock.patch(
- "google.auth.transport.grpc.SslCredentials.is_mtls",
- new_callable=mock.PropertyMock,
- ) as is_mtls_mock:
- is_mtls_mock.return_value = False
patched.return_value = None
client = client_class()
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
- host=client.DEFAULT_ENDPOINT,
+ host=expected_host,
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=expected_client_cert_source,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
+ # Check the case client_cert_source and ADC client cert are not provided.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+ ):
+ with mock.patch.object(transport_class, "__init__") as patched:
+ with mock.patch(
+ "google.auth.transport.mtls.has_default_client_cert_source",
+ return_value=False,
+ ):
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
@pytest.mark.parametrize(
"client_class,transport_class,transport_name",
@@ -412,7 +401,7 @@ def test_config_service_v2_client_client_options_scopes(
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=["1", "2"],
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -442,7 +431,7 @@ def test_config_service_v2_client_client_options_credentials_file(
credentials_file="credentials.json",
host=client.DEFAULT_ENDPOINT,
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -461,7 +450,7 @@ def test_config_service_v2_client_client_options_from_dict():
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -504,6 +493,22 @@ def test_list_buckets_from_dict():
test_list_buckets(request_type=dict)
+def test_list_buckets_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = ConfigServiceV2Client(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_buckets), "__call__") as call:
+ client.list_buckets()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == logging_config.ListBucketsRequest()
+
+
@pytest.mark.asyncio
async def test_list_buckets_async(
transport: str = "grpc_asyncio", request_type=logging_config.ListBucketsRequest
@@ -847,6 +852,22 @@ def test_get_bucket_from_dict():
test_get_bucket(request_type=dict)
+def test_get_bucket_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = ConfigServiceV2Client(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_bucket), "__call__") as call:
+ client.get_bucket()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == logging_config.GetBucketRequest()
+
+
@pytest.mark.asyncio
async def test_get_bucket_async(
transport: str = "grpc_asyncio", request_type=logging_config.GetBucketRequest
@@ -999,6 +1020,22 @@ def test_create_bucket_from_dict():
test_create_bucket(request_type=dict)
+def test_create_bucket_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = ConfigServiceV2Client(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.create_bucket), "__call__") as call:
+ client.create_bucket()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == logging_config.CreateBucketRequest()
+
+
@pytest.mark.asyncio
async def test_create_bucket_async(
transport: str = "grpc_asyncio", request_type=logging_config.CreateBucketRequest
@@ -1151,6 +1188,22 @@ def test_update_bucket_from_dict():
test_update_bucket(request_type=dict)
+def test_update_bucket_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = ConfigServiceV2Client(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.update_bucket), "__call__") as call:
+ client.update_bucket()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == logging_config.UpdateBucketRequest()
+
+
@pytest.mark.asyncio
async def test_update_bucket_async(
transport: str = "grpc_asyncio", request_type=logging_config.UpdateBucketRequest
@@ -1286,6 +1339,22 @@ def test_delete_bucket_from_dict():
test_delete_bucket(request_type=dict)
+def test_delete_bucket_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = ConfigServiceV2Client(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call:
+ client.delete_bucket()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == logging_config.DeleteBucketRequest()
+
+
@pytest.mark.asyncio
async def test_delete_bucket_async(
transport: str = "grpc_asyncio", request_type=logging_config.DeleteBucketRequest
@@ -1401,6 +1470,22 @@ def test_undelete_bucket_from_dict():
test_undelete_bucket(request_type=dict)
+def test_undelete_bucket_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = ConfigServiceV2Client(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call:
+ client.undelete_bucket()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == logging_config.UndeleteBucketRequest()
+
+
@pytest.mark.asyncio
async def test_undelete_bucket_async(
transport: str = "grpc_asyncio", request_type=logging_config.UndeleteBucketRequest
@@ -1521,6 +1606,22 @@ def test_list_views_from_dict():
test_list_views(request_type=dict)
+def test_list_views_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = ConfigServiceV2Client(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_views), "__call__") as call:
+ client.list_views()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == logging_config.ListViewsRequest()
+
+
@pytest.mark.asyncio
async def test_list_views_async(
transport: str = "grpc_asyncio", request_type=logging_config.ListViewsRequest
@@ -1854,6 +1955,22 @@ def test_get_view_from_dict():
test_get_view(request_type=dict)
+def test_get_view_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = ConfigServiceV2Client(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_view), "__call__") as call:
+ client.get_view()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == logging_config.GetViewRequest()
+
+
@pytest.mark.asyncio
async def test_get_view_async(
transport: str = "grpc_asyncio", request_type=logging_config.GetViewRequest
@@ -1992,6 +2109,22 @@ def test_create_view_from_dict():
test_create_view(request_type=dict)
+def test_create_view_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = ConfigServiceV2Client(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.create_view), "__call__") as call:
+ client.create_view()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == logging_config.CreateViewRequest()
+
+
@pytest.mark.asyncio
async def test_create_view_async(
transport: str = "grpc_asyncio", request_type=logging_config.CreateViewRequest
@@ -2130,6 +2263,22 @@ def test_update_view_from_dict():
test_update_view(request_type=dict)
+def test_update_view_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = ConfigServiceV2Client(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.update_view), "__call__") as call:
+ client.update_view()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == logging_config.UpdateViewRequest()
+
+
@pytest.mark.asyncio
async def test_update_view_async(
transport: str = "grpc_asyncio", request_type=logging_config.UpdateViewRequest
@@ -2259,6 +2408,22 @@ def test_delete_view_from_dict():
test_delete_view(request_type=dict)
+def test_delete_view_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = ConfigServiceV2Client(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.delete_view), "__call__") as call:
+ client.delete_view()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == logging_config.DeleteViewRequest()
+
+
@pytest.mark.asyncio
async def test_delete_view_async(
transport: str = "grpc_asyncio", request_type=logging_config.DeleteViewRequest
@@ -2379,6 +2544,22 @@ def test_list_sinks_from_dict():
test_list_sinks(request_type=dict)
+def test_list_sinks_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = ConfigServiceV2Client(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_sinks), "__call__") as call:
+ client.list_sinks()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == logging_config.ListSinksRequest()
+
+
@pytest.mark.asyncio
async def test_list_sinks_async(
transport: str = "grpc_asyncio", request_type=logging_config.ListSinksRequest
@@ -2732,6 +2913,22 @@ def test_get_sink_from_dict():
test_get_sink(request_type=dict)
+def test_get_sink_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = ConfigServiceV2Client(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_sink), "__call__") as call:
+ client.get_sink()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == logging_config.GetSinkRequest()
+
+
@pytest.mark.asyncio
async def test_get_sink_async(
transport: str = "grpc_asyncio", request_type=logging_config.GetSinkRequest
@@ -2972,6 +3169,22 @@ def test_create_sink_from_dict():
test_create_sink(request_type=dict)
+def test_create_sink_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = ConfigServiceV2Client(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.create_sink), "__call__") as call:
+ client.create_sink()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == logging_config.CreateSinkRequest()
+
+
@pytest.mark.asyncio
async def test_create_sink_async(
transport: str = "grpc_asyncio", request_type=logging_config.CreateSinkRequest
@@ -3224,6 +3437,22 @@ def test_update_sink_from_dict():
test_update_sink(request_type=dict)
+def test_update_sink_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = ConfigServiceV2Client(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.update_sink), "__call__") as call:
+ client.update_sink()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == logging_config.UpdateSinkRequest()
+
+
@pytest.mark.asyncio
async def test_update_sink_async(
transport: str = "grpc_asyncio", request_type=logging_config.UpdateSinkRequest
@@ -3457,6 +3686,22 @@ def test_delete_sink_from_dict():
test_delete_sink(request_type=dict)
+def test_delete_sink_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = ConfigServiceV2Client(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.delete_sink), "__call__") as call:
+ client.delete_sink()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == logging_config.DeleteSinkRequest()
+
+
@pytest.mark.asyncio
async def test_delete_sink_async(
transport: str = "grpc_asyncio", request_type=logging_config.DeleteSinkRequest
@@ -3642,6 +3887,22 @@ def test_list_exclusions_from_dict():
test_list_exclusions(request_type=dict)
+def test_list_exclusions_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = ConfigServiceV2Client(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call:
+ client.list_exclusions()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == logging_config.ListExclusionsRequest()
+
+
@pytest.mark.asyncio
async def test_list_exclusions_async(
transport: str = "grpc_asyncio", request_type=logging_config.ListExclusionsRequest
@@ -4004,6 +4265,22 @@ def test_get_exclusion_from_dict():
test_get_exclusion(request_type=dict)
+def test_get_exclusion_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = ConfigServiceV2Client(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call:
+ client.get_exclusion()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == logging_config.GetExclusionRequest()
+
+
@pytest.mark.asyncio
async def test_get_exclusion_async(
transport: str = "grpc_asyncio", request_type=logging_config.GetExclusionRequest
@@ -4217,6 +4494,22 @@ def test_create_exclusion_from_dict():
test_create_exclusion(request_type=dict)
+def test_create_exclusion_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = ConfigServiceV2Client(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call:
+ client.create_exclusion()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == logging_config.CreateExclusionRequest()
+
+
@pytest.mark.asyncio
async def test_create_exclusion_async(
transport: str = "grpc_asyncio", request_type=logging_config.CreateExclusionRequest
@@ -4444,6 +4737,22 @@ def test_update_exclusion_from_dict():
test_update_exclusion(request_type=dict)
+def test_update_exclusion_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = ConfigServiceV2Client(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call:
+ client.update_exclusion()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == logging_config.UpdateExclusionRequest()
+
+
@pytest.mark.asyncio
async def test_update_exclusion_async(
transport: str = "grpc_asyncio", request_type=logging_config.UpdateExclusionRequest
@@ -4665,6 +4974,22 @@ def test_delete_exclusion_from_dict():
test_delete_exclusion(request_type=dict)
+def test_delete_exclusion_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = ConfigServiceV2Client(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call:
+ client.delete_exclusion()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == logging_config.DeleteExclusionRequest()
+
+
@pytest.mark.asyncio
async def test_delete_exclusion_async(
transport: str = "grpc_asyncio", request_type=logging_config.DeleteExclusionRequest
@@ -4858,6 +5183,24 @@ def test_get_cmek_settings_from_dict():
test_get_cmek_settings(request_type=dict)
+def test_get_cmek_settings_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = ConfigServiceV2Client(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_cmek_settings), "__call__"
+ ) as call:
+ client.get_cmek_settings()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == logging_config.GetCmekSettingsRequest()
+
+
@pytest.mark.asyncio
async def test_get_cmek_settings_async(
transport: str = "grpc_asyncio", request_type=logging_config.GetCmekSettingsRequest
@@ -5006,6 +5349,24 @@ def test_update_cmek_settings_from_dict():
test_update_cmek_settings(request_type=dict)
+def test_update_cmek_settings_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = ConfigServiceV2Client(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.update_cmek_settings), "__call__"
+ ) as call:
+ client.update_cmek_settings()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == logging_config.UpdateCmekSettingsRequest()
+
+
@pytest.mark.asyncio
async def test_update_cmek_settings_async(
transport: str = "grpc_asyncio",
@@ -5306,6 +5667,56 @@ def test_config_service_v2_transport_auth_adc():
)
+@pytest.mark.parametrize(
+ "transport_class",
+ [
+ transports.ConfigServiceV2GrpcTransport,
+ transports.ConfigServiceV2GrpcAsyncIOTransport,
+ ],
+)
+def test_config_service_v2_grpc_transport_client_cert_source_for_mtls(transport_class):
+ cred = credentials.AnonymousCredentials()
+
+ # Check ssl_channel_credentials is used if provided.
+ with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
+ mock_ssl_channel_creds = mock.Mock()
+ transport_class(
+ host="squid.clam.whelk",
+ credentials=cred,
+ ssl_channel_credentials=mock_ssl_channel_creds,
+ )
+ mock_create_channel.assert_called_once_with(
+ "squid.clam.whelk:443",
+ credentials=cred,
+ credentials_file=None,
+ scopes=(
+ "https://www.googleapis.com/auth/cloud-platform",
+ "https://www.googleapis.com/auth/cloud-platform.read-only",
+ "https://www.googleapis.com/auth/logging.admin",
+ "https://www.googleapis.com/auth/logging.read",
+ ),
+ ssl_credentials=mock_ssl_channel_creds,
+ quota_project_id=None,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+
+ # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
+ # is used.
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+ with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+ transport_class(
+ credentials=cred,
+ client_cert_source_for_mtls=client_cert_source_callback,
+ )
+ expected_cert, expected_key = client_cert_source_callback()
+ mock_ssl_cred.assert_called_once_with(
+ certificate_chain=expected_cert, private_key=expected_key
+ )
+
+
def test_config_service_v2_host_no_port():
client = ConfigServiceV2Client(
credentials=credentials.AnonymousCredentials(),
@@ -5350,6 +5761,8 @@ def test_config_service_v2_grpc_asyncio_transport_channel():
assert transport._ssl_channel_credentials == None
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize(
"transport_class",
[
@@ -5407,6 +5820,8 @@ def test_config_service_v2_transport_channel_mtls_with_client_cert_source(
assert transport._ssl_channel_credentials == mock_ssl_cred
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize(
"transport_class",
[
diff --git a/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/tests/unit/gapic/logging_v2/test_logging_service_v2.py
index 110a383c0..66f22621c 100644
--- a/tests/unit/gapic/logging_v2/test_logging_service_v2.py
+++ b/tests/unit/gapic/logging_v2/test_logging_service_v2.py
@@ -95,15 +95,19 @@ def test__get_default_mtls_endpoint():
)
-def test_logging_service_v2_client_from_service_account_info():
+@pytest.mark.parametrize(
+ "client_class", [LoggingServiceV2Client, LoggingServiceV2AsyncClient,]
+)
+def test_logging_service_v2_client_from_service_account_info(client_class):
creds = credentials.AnonymousCredentials()
with mock.patch.object(
service_account.Credentials, "from_service_account_info"
) as factory:
factory.return_value = creds
info = {"valid": True}
- client = LoggingServiceV2Client.from_service_account_info(info)
+ client = client_class.from_service_account_info(info)
assert client.transport._credentials == creds
+ assert isinstance(client, client_class)
assert client.transport._host == "logging.googleapis.com:443"
@@ -119,9 +123,11 @@ def test_logging_service_v2_client_from_service_account_file(client_class):
factory.return_value = creds
client = client_class.from_service_account_file("dummy/file/path.json")
assert client.transport._credentials == creds
+ assert isinstance(client, client_class)
client = client_class.from_service_account_json("dummy/file/path.json")
assert client.transport._credentials == creds
+ assert isinstance(client, client_class)
assert client.transport._host == "logging.googleapis.com:443"
@@ -182,7 +188,7 @@ def test_logging_service_v2_client_client_options(
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -198,7 +204,7 @@ def test_logging_service_v2_client_client_options(
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -214,7 +220,7 @@ def test_logging_service_v2_client_client_options(
credentials_file=None,
host=client.DEFAULT_MTLS_ENDPOINT,
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -242,7 +248,7 @@ def test_logging_service_v2_client_client_options(
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id="octopus",
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -303,29 +309,25 @@ def test_logging_service_v2_client_mtls_env_auto(
client_cert_source=client_cert_source_callback
)
with mock.patch.object(transport_class, "__init__") as patched:
- ssl_channel_creds = mock.Mock()
- with mock.patch(
- "grpc.ssl_channel_credentials", return_value=ssl_channel_creds
- ):
- patched.return_value = None
- client = client_class(client_options=options)
+ patched.return_value = None
+ client = client_class(client_options=options)
- if use_client_cert_env == "false":
- expected_ssl_channel_creds = None
- expected_host = client.DEFAULT_ENDPOINT
- else:
- expected_ssl_channel_creds = ssl_channel_creds
- expected_host = client.DEFAULT_MTLS_ENDPOINT
+ if use_client_cert_env == "false":
+ expected_client_cert_source = None
+ expected_host = client.DEFAULT_ENDPOINT
+ else:
+ expected_client_cert_source = client_cert_source_callback
+ expected_host = client.DEFAULT_MTLS_ENDPOINT
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host=expected_host,
- scopes=None,
- ssl_channel_credentials=expected_ssl_channel_creds,
- quota_project_id=None,
- client_info=transports.base.DEFAULT_CLIENT_INFO,
- )
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=expected_host,
+ scopes=None,
+ client_cert_source_for_mtls=expected_client_cert_source,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
# Check the case ADC client cert is provided. Whether client cert is used depends on
# GOOGLE_API_USE_CLIENT_CERTIFICATE value.
@@ -334,66 +336,53 @@ def test_logging_service_v2_client_mtls_env_auto(
):
with mock.patch.object(transport_class, "__init__") as patched:
with mock.patch(
- "google.auth.transport.grpc.SslCredentials.__init__", return_value=None
+ "google.auth.transport.mtls.has_default_client_cert_source",
+ return_value=True,
):
with mock.patch(
- "google.auth.transport.grpc.SslCredentials.is_mtls",
- new_callable=mock.PropertyMock,
- ) as is_mtls_mock:
- with mock.patch(
- "google.auth.transport.grpc.SslCredentials.ssl_credentials",
- new_callable=mock.PropertyMock,
- ) as ssl_credentials_mock:
- if use_client_cert_env == "false":
- is_mtls_mock.return_value = False
- ssl_credentials_mock.return_value = None
- expected_host = client.DEFAULT_ENDPOINT
- expected_ssl_channel_creds = None
- else:
- is_mtls_mock.return_value = True
- ssl_credentials_mock.return_value = mock.Mock()
- expected_host = client.DEFAULT_MTLS_ENDPOINT
- expected_ssl_channel_creds = (
- ssl_credentials_mock.return_value
- )
-
- patched.return_value = None
- client = client_class()
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host=expected_host,
- scopes=None,
- ssl_channel_credentials=expected_ssl_channel_creds,
- quota_project_id=None,
- client_info=transports.base.DEFAULT_CLIENT_INFO,
- )
+ "google.auth.transport.mtls.default_client_cert_source",
+ return_value=client_cert_source_callback,
+ ):
+ if use_client_cert_env == "false":
+ expected_host = client.DEFAULT_ENDPOINT
+ expected_client_cert_source = None
+ else:
+ expected_host = client.DEFAULT_MTLS_ENDPOINT
+ expected_client_cert_source = client_cert_source_callback
- # Check the case client_cert_source and ADC client cert are not provided.
- with mock.patch.dict(
- os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
- ):
- with mock.patch.object(transport_class, "__init__") as patched:
- with mock.patch(
- "google.auth.transport.grpc.SslCredentials.__init__", return_value=None
- ):
- with mock.patch(
- "google.auth.transport.grpc.SslCredentials.is_mtls",
- new_callable=mock.PropertyMock,
- ) as is_mtls_mock:
- is_mtls_mock.return_value = False
patched.return_value = None
client = client_class()
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
- host=client.DEFAULT_ENDPOINT,
+ host=expected_host,
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=expected_client_cert_source,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
+ # Check the case client_cert_source and ADC client cert are not provided.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+ ):
+ with mock.patch.object(transport_class, "__init__") as patched:
+ with mock.patch(
+ "google.auth.transport.mtls.has_default_client_cert_source",
+ return_value=False,
+ ):
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
@pytest.mark.parametrize(
"client_class,transport_class,transport_name",
@@ -419,7 +408,7 @@ def test_logging_service_v2_client_client_options_scopes(
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=["1", "2"],
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -449,7 +438,7 @@ def test_logging_service_v2_client_client_options_credentials_file(
credentials_file="credentials.json",
host=client.DEFAULT_ENDPOINT,
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -468,7 +457,7 @@ def test_logging_service_v2_client_client_options_from_dict():
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -504,6 +493,22 @@ def test_delete_log_from_dict():
test_delete_log(request_type=dict)
+def test_delete_log_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = LoggingServiceV2Client(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.delete_log), "__call__") as call:
+ client.delete_log()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == logging.DeleteLogRequest()
+
+
@pytest.mark.asyncio
async def test_delete_log_async(
transport: str = "grpc_asyncio", request_type=logging.DeleteLogRequest
@@ -693,6 +698,24 @@ def test_write_log_entries_from_dict():
test_write_log_entries(request_type=dict)
+def test_write_log_entries_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = LoggingServiceV2Client(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.write_log_entries), "__call__"
+ ) as call:
+ client.write_log_entries()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == logging.WriteLogEntriesRequest()
+
+
@pytest.mark.asyncio
async def test_write_log_entries_async(
transport: str = "grpc_asyncio", request_type=logging.WriteLogEntriesRequest
@@ -877,6 +900,22 @@ def test_list_log_entries_from_dict():
test_list_log_entries(request_type=dict)
+def test_list_log_entries_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = LoggingServiceV2Client(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_log_entries), "__call__") as call:
+ client.list_log_entries()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == logging.ListLogEntriesRequest()
+
+
@pytest.mark.asyncio
async def test_list_log_entries_async(
transport: str = "grpc_asyncio", request_type=logging.ListLogEntriesRequest
@@ -1183,6 +1222,24 @@ def test_list_monitored_resource_descriptors_from_dict():
test_list_monitored_resource_descriptors(request_type=dict)
+def test_list_monitored_resource_descriptors_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = LoggingServiceV2Client(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_monitored_resource_descriptors), "__call__"
+ ) as call:
+ client.list_monitored_resource_descriptors()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == logging.ListMonitoredResourceDescriptorsRequest()
+
+
@pytest.mark.asyncio
async def test_list_monitored_resource_descriptors_async(
transport: str = "grpc_asyncio",
@@ -1446,6 +1503,22 @@ def test_list_logs_from_dict():
test_list_logs(request_type=dict)
+def test_list_logs_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = LoggingServiceV2Client(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_logs), "__call__") as call:
+ client.list_logs()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == logging.ListLogsRequest()
+
+
@pytest.mark.asyncio
async def test_list_logs_async(
transport: str = "grpc_asyncio", request_type=logging.ListLogsRequest
@@ -1971,6 +2044,57 @@ def test_logging_service_v2_transport_auth_adc():
)
+@pytest.mark.parametrize(
+ "transport_class",
+ [
+ transports.LoggingServiceV2GrpcTransport,
+ transports.LoggingServiceV2GrpcAsyncIOTransport,
+ ],
+)
+def test_logging_service_v2_grpc_transport_client_cert_source_for_mtls(transport_class):
+ cred = credentials.AnonymousCredentials()
+
+ # Check ssl_channel_credentials is used if provided.
+ with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
+ mock_ssl_channel_creds = mock.Mock()
+ transport_class(
+ host="squid.clam.whelk",
+ credentials=cred,
+ ssl_channel_credentials=mock_ssl_channel_creds,
+ )
+ mock_create_channel.assert_called_once_with(
+ "squid.clam.whelk:443",
+ credentials=cred,
+ credentials_file=None,
+ scopes=(
+ "https://www.googleapis.com/auth/cloud-platform",
+ "https://www.googleapis.com/auth/cloud-platform.read-only",
+ "https://www.googleapis.com/auth/logging.admin",
+ "https://www.googleapis.com/auth/logging.read",
+ "https://www.googleapis.com/auth/logging.write",
+ ),
+ ssl_credentials=mock_ssl_channel_creds,
+ quota_project_id=None,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+
+ # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
+ # is used.
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+ with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+ transport_class(
+ credentials=cred,
+ client_cert_source_for_mtls=client_cert_source_callback,
+ )
+ expected_cert, expected_key = client_cert_source_callback()
+ mock_ssl_cred.assert_called_once_with(
+ certificate_chain=expected_cert, private_key=expected_key
+ )
+
+
def test_logging_service_v2_host_no_port():
client = LoggingServiceV2Client(
credentials=credentials.AnonymousCredentials(),
@@ -2015,6 +2139,8 @@ def test_logging_service_v2_grpc_asyncio_transport_channel():
assert transport._ssl_channel_credentials == None
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize(
"transport_class",
[
@@ -2073,6 +2199,8 @@ def test_logging_service_v2_transport_channel_mtls_with_client_cert_source(
assert transport._ssl_channel_credentials == mock_ssl_cred
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize(
"transport_class",
[
diff --git a/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/tests/unit/gapic/logging_v2/test_metrics_service_v2.py
index 8ae5fdc54..6faec201e 100644
--- a/tests/unit/gapic/logging_v2/test_metrics_service_v2.py
+++ b/tests/unit/gapic/logging_v2/test_metrics_service_v2.py
@@ -94,15 +94,19 @@ def test__get_default_mtls_endpoint():
)
-def test_metrics_service_v2_client_from_service_account_info():
+@pytest.mark.parametrize(
+ "client_class", [MetricsServiceV2Client, MetricsServiceV2AsyncClient,]
+)
+def test_metrics_service_v2_client_from_service_account_info(client_class):
creds = credentials.AnonymousCredentials()
with mock.patch.object(
service_account.Credentials, "from_service_account_info"
) as factory:
factory.return_value = creds
info = {"valid": True}
- client = MetricsServiceV2Client.from_service_account_info(info)
+ client = client_class.from_service_account_info(info)
assert client.transport._credentials == creds
+ assert isinstance(client, client_class)
assert client.transport._host == "logging.googleapis.com:443"
@@ -118,9 +122,11 @@ def test_metrics_service_v2_client_from_service_account_file(client_class):
factory.return_value = creds
client = client_class.from_service_account_file("dummy/file/path.json")
assert client.transport._credentials == creds
+ assert isinstance(client, client_class)
client = client_class.from_service_account_json("dummy/file/path.json")
assert client.transport._credentials == creds
+ assert isinstance(client, client_class)
assert client.transport._host == "logging.googleapis.com:443"
@@ -181,7 +187,7 @@ def test_metrics_service_v2_client_client_options(
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -197,7 +203,7 @@ def test_metrics_service_v2_client_client_options(
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -213,7 +219,7 @@ def test_metrics_service_v2_client_client_options(
credentials_file=None,
host=client.DEFAULT_MTLS_ENDPOINT,
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -241,7 +247,7 @@ def test_metrics_service_v2_client_client_options(
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id="octopus",
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -302,29 +308,25 @@ def test_metrics_service_v2_client_mtls_env_auto(
client_cert_source=client_cert_source_callback
)
with mock.patch.object(transport_class, "__init__") as patched:
- ssl_channel_creds = mock.Mock()
- with mock.patch(
- "grpc.ssl_channel_credentials", return_value=ssl_channel_creds
- ):
- patched.return_value = None
- client = client_class(client_options=options)
+ patched.return_value = None
+ client = client_class(client_options=options)
- if use_client_cert_env == "false":
- expected_ssl_channel_creds = None
- expected_host = client.DEFAULT_ENDPOINT
- else:
- expected_ssl_channel_creds = ssl_channel_creds
- expected_host = client.DEFAULT_MTLS_ENDPOINT
+ if use_client_cert_env == "false":
+ expected_client_cert_source = None
+ expected_host = client.DEFAULT_ENDPOINT
+ else:
+ expected_client_cert_source = client_cert_source_callback
+ expected_host = client.DEFAULT_MTLS_ENDPOINT
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host=expected_host,
- scopes=None,
- ssl_channel_credentials=expected_ssl_channel_creds,
- quota_project_id=None,
- client_info=transports.base.DEFAULT_CLIENT_INFO,
- )
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=expected_host,
+ scopes=None,
+ client_cert_source_for_mtls=expected_client_cert_source,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
# Check the case ADC client cert is provided. Whether client cert is used depends on
# GOOGLE_API_USE_CLIENT_CERTIFICATE value.
@@ -333,66 +335,53 @@ def test_metrics_service_v2_client_mtls_env_auto(
):
with mock.patch.object(transport_class, "__init__") as patched:
with mock.patch(
- "google.auth.transport.grpc.SslCredentials.__init__", return_value=None
+ "google.auth.transport.mtls.has_default_client_cert_source",
+ return_value=True,
):
with mock.patch(
- "google.auth.transport.grpc.SslCredentials.is_mtls",
- new_callable=mock.PropertyMock,
- ) as is_mtls_mock:
- with mock.patch(
- "google.auth.transport.grpc.SslCredentials.ssl_credentials",
- new_callable=mock.PropertyMock,
- ) as ssl_credentials_mock:
- if use_client_cert_env == "false":
- is_mtls_mock.return_value = False
- ssl_credentials_mock.return_value = None
- expected_host = client.DEFAULT_ENDPOINT
- expected_ssl_channel_creds = None
- else:
- is_mtls_mock.return_value = True
- ssl_credentials_mock.return_value = mock.Mock()
- expected_host = client.DEFAULT_MTLS_ENDPOINT
- expected_ssl_channel_creds = (
- ssl_credentials_mock.return_value
- )
-
- patched.return_value = None
- client = client_class()
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host=expected_host,
- scopes=None,
- ssl_channel_credentials=expected_ssl_channel_creds,
- quota_project_id=None,
- client_info=transports.base.DEFAULT_CLIENT_INFO,
- )
+ "google.auth.transport.mtls.default_client_cert_source",
+ return_value=client_cert_source_callback,
+ ):
+ if use_client_cert_env == "false":
+ expected_host = client.DEFAULT_ENDPOINT
+ expected_client_cert_source = None
+ else:
+ expected_host = client.DEFAULT_MTLS_ENDPOINT
+ expected_client_cert_source = client_cert_source_callback
- # Check the case client_cert_source and ADC client cert are not provided.
- with mock.patch.dict(
- os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
- ):
- with mock.patch.object(transport_class, "__init__") as patched:
- with mock.patch(
- "google.auth.transport.grpc.SslCredentials.__init__", return_value=None
- ):
- with mock.patch(
- "google.auth.transport.grpc.SslCredentials.is_mtls",
- new_callable=mock.PropertyMock,
- ) as is_mtls_mock:
- is_mtls_mock.return_value = False
patched.return_value = None
client = client_class()
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
- host=client.DEFAULT_ENDPOINT,
+ host=expected_host,
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=expected_client_cert_source,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
+ # Check the case client_cert_source and ADC client cert are not provided.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+ ):
+ with mock.patch.object(transport_class, "__init__") as patched:
+ with mock.patch(
+ "google.auth.transport.mtls.has_default_client_cert_source",
+ return_value=False,
+ ):
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
@pytest.mark.parametrize(
"client_class,transport_class,transport_name",
@@ -418,7 +407,7 @@ def test_metrics_service_v2_client_client_options_scopes(
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=["1", "2"],
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -448,7 +437,7 @@ def test_metrics_service_v2_client_client_options_credentials_file(
credentials_file="credentials.json",
host=client.DEFAULT_ENDPOINT,
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -467,7 +456,7 @@ def test_metrics_service_v2_client_client_options_from_dict():
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -510,6 +499,22 @@ def test_list_log_metrics_from_dict():
test_list_log_metrics(request_type=dict)
+def test_list_log_metrics_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = MetricsServiceV2Client(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call:
+ client.list_log_metrics()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == logging_metrics.ListLogMetricsRequest()
+
+
@pytest.mark.asyncio
async def test_list_log_metrics_async(
transport: str = "grpc_asyncio", request_type=logging_metrics.ListLogMetricsRequest
@@ -861,6 +866,22 @@ def test_get_log_metric_from_dict():
test_get_log_metric(request_type=dict)
+def test_get_log_metric_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = MetricsServiceV2Client(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_log_metric), "__call__") as call:
+ client.get_log_metric()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == logging_metrics.GetLogMetricRequest()
+
+
@pytest.mark.asyncio
async def test_get_log_metric_async(
transport: str = "grpc_asyncio", request_type=logging_metrics.GetLogMetricRequest
@@ -1088,6 +1109,24 @@ def test_create_log_metric_from_dict():
test_create_log_metric(request_type=dict)
+def test_create_log_metric_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = MetricsServiceV2Client(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_log_metric), "__call__"
+ ) as call:
+ client.create_log_metric()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == logging_metrics.CreateLogMetricRequest()
+
+
@pytest.mark.asyncio
async def test_create_log_metric_async(
transport: str = "grpc_asyncio", request_type=logging_metrics.CreateLogMetricRequest
@@ -1337,6 +1376,24 @@ def test_update_log_metric_from_dict():
test_update_log_metric(request_type=dict)
+def test_update_log_metric_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = MetricsServiceV2Client(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.update_log_metric), "__call__"
+ ) as call:
+ client.update_log_metric()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == logging_metrics.UpdateLogMetricRequest()
+
+
@pytest.mark.asyncio
async def test_update_log_metric_async(
transport: str = "grpc_asyncio", request_type=logging_metrics.UpdateLogMetricRequest
@@ -1571,6 +1628,24 @@ def test_delete_log_metric_from_dict():
test_delete_log_metric(request_type=dict)
+def test_delete_log_metric_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = MetricsServiceV2Client(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_log_metric), "__call__"
+ ) as call:
+ client.delete_log_metric()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == logging_metrics.DeleteLogMetricRequest()
+
+
@pytest.mark.asyncio
async def test_delete_log_metric_async(
transport: str = "grpc_asyncio", request_type=logging_metrics.DeleteLogMetricRequest
@@ -1916,6 +1991,57 @@ def test_metrics_service_v2_transport_auth_adc():
)
+@pytest.mark.parametrize(
+ "transport_class",
+ [
+ transports.MetricsServiceV2GrpcTransport,
+ transports.MetricsServiceV2GrpcAsyncIOTransport,
+ ],
+)
+def test_metrics_service_v2_grpc_transport_client_cert_source_for_mtls(transport_class):
+ cred = credentials.AnonymousCredentials()
+
+ # Check ssl_channel_credentials is used if provided.
+ with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
+ mock_ssl_channel_creds = mock.Mock()
+ transport_class(
+ host="squid.clam.whelk",
+ credentials=cred,
+ ssl_channel_credentials=mock_ssl_channel_creds,
+ )
+ mock_create_channel.assert_called_once_with(
+ "squid.clam.whelk:443",
+ credentials=cred,
+ credentials_file=None,
+ scopes=(
+ "https://www.googleapis.com/auth/cloud-platform",
+ "https://www.googleapis.com/auth/cloud-platform.read-only",
+ "https://www.googleapis.com/auth/logging.admin",
+ "https://www.googleapis.com/auth/logging.read",
+ "https://www.googleapis.com/auth/logging.write",
+ ),
+ ssl_credentials=mock_ssl_channel_creds,
+ quota_project_id=None,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+
+ # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
+ # is used.
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+ with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+ transport_class(
+ credentials=cred,
+ client_cert_source_for_mtls=client_cert_source_callback,
+ )
+ expected_cert, expected_key = client_cert_source_callback()
+ mock_ssl_cred.assert_called_once_with(
+ certificate_chain=expected_cert, private_key=expected_key
+ )
+
+
def test_metrics_service_v2_host_no_port():
client = MetricsServiceV2Client(
credentials=credentials.AnonymousCredentials(),
@@ -1960,6 +2086,8 @@ def test_metrics_service_v2_grpc_asyncio_transport_channel():
assert transport._ssl_channel_credentials == None
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize(
"transport_class",
[
@@ -2018,6 +2146,8 @@ def test_metrics_service_v2_transport_channel_mtls_with_client_cert_source(
assert transport._ssl_channel_credentials == mock_ssl_cred
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize(
"transport_class",
[
diff --git a/tests/unit/handlers/test__monitored_resources.py b/tests/unit/handlers/test__monitored_resources.py
new file mode 100644
index 000000000..00fade39c
--- /dev/null
+++ b/tests/unit/handlers/test__monitored_resources.py
@@ -0,0 +1,249 @@
+# Copyright 2021 Google LLC All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import unittest
+
+import mock
+import os
+
+
+from google.cloud.logging_v2.handlers._monitored_resources import (
+ _create_functions_resource,
+)
+from google.cloud.logging_v2.handlers._monitored_resources import (
+ _create_app_engine_resource,
+)
+from google.cloud.logging_v2.handlers._monitored_resources import (
+ _create_kubernetes_resource,
+)
+from google.cloud.logging_v2.handlers._monitored_resources import (
+ _create_cloud_run_resource,
+)
+from google.cloud.logging_v2.handlers._monitored_resources import (
+ _create_compute_resource,
+)
+from google.cloud.logging_v2.handlers._monitored_resources import (
+ _create_global_resource,
+)
+from google.cloud.logging_v2.handlers._monitored_resources import detect_resource
+from google.cloud.logging_v2.handlers import _monitored_resources
+from google.cloud.logging_v2.resource import Resource
+
+
+class Test_Create_Resources(unittest.TestCase):
+
+ PROJECT = "test-project"
+ LOCATION = "test-location"
+ NAME = "test-name"
+ CLUSTER = "test-cluster"
+ VERSION = "1"
+ CONFIG = "test-config"
+
+ def _mock_metadata(self, endpoint):
+ if (
+ endpoint == _monitored_resources._ZONE_ID
+ or endpoint == _monitored_resources._REGION_ID
+ ):
+ return self.LOCATION
+ elif (
+ endpoint == _monitored_resources._GKE_CLUSTER_NAME
+ or endpoint == _monitored_resources._GCE_INSTANCE_ID
+ ):
+ return self.NAME
+ else:
+ return None
+
+ def setUp(self):
+ os.environ.clear()
+
+ def test_create_legacy_functions_resource(self):
+ patch = mock.patch(
+ "google.cloud.logging_v2.handlers._monitored_resources.retrieve_metadata_server",
+ wraps=self._mock_metadata,
+ )
+
+ os.environ[_monitored_resources._CLOUD_RUN_SERVICE_ID] = self.NAME
+ with patch:
+ legacy_func_resource = _create_functions_resource(self.PROJECT)
+
+ self.assertIsInstance(legacy_func_resource, Resource)
+ self.assertEqual(legacy_func_resource.type, "cloud_function")
+ self.assertEqual(legacy_func_resource.labels["project_id"], self.PROJECT)
+ self.assertEqual(legacy_func_resource.labels["function_name"], self.NAME)
+ self.assertEqual(legacy_func_resource.labels["region"], self.LOCATION)
+
+ def test_create_modern_functions_resource(self):
+ patch = mock.patch(
+ "google.cloud.logging_v2.handlers._monitored_resources.retrieve_metadata_server",
+ wraps=self._mock_metadata,
+ )
+ os.environ[_monitored_resources._FUNCTION_NAME] = self.NAME
+ with patch:
+ func_resource = _create_functions_resource(self.PROJECT)
+
+ self.assertIsInstance(func_resource, Resource)
+ self.assertEqual(func_resource.type, "cloud_function")
+ self.assertEqual(func_resource.labels["project_id"], self.PROJECT)
+ self.assertEqual(func_resource.labels["function_name"], self.NAME)
+ self.assertEqual(func_resource.labels["region"], self.LOCATION)
+
+ def test_create_kubernetes_resource(self):
+
+ patch = mock.patch(
+ "google.cloud.logging_v2.handlers._monitored_resources.retrieve_metadata_server",
+ wraps=self._mock_metadata,
+ )
+ with patch:
+ resource = _create_kubernetes_resource(self.PROJECT)
+
+ self.assertIsInstance(resource, Resource)
+ self.assertEqual(resource.type, "k8s_container")
+ self.assertEqual(resource.labels["project_id"], self.PROJECT)
+ self.assertEqual(resource.labels["cluster_name"], self.NAME)
+ self.assertEqual(resource.labels["location"], self.LOCATION)
+
+ def test_compute_resource(self):
+ patch = mock.patch(
+ "google.cloud.logging_v2.handlers._monitored_resources.retrieve_metadata_server",
+ wraps=self._mock_metadata,
+ )
+
+ with patch:
+ resource = _create_compute_resource(self.PROJECT)
+ self.assertIsInstance(resource, Resource)
+ self.assertEqual(resource.type, "gce_instance")
+ self.assertEqual(resource.labels["project_id"], self.PROJECT)
+ self.assertEqual(resource.labels["instance_id"], self.NAME)
+ self.assertEqual(resource.labels["zone"], self.LOCATION)
+
+ def test_cloud_run_resource(self):
+ patch = mock.patch(
+ "google.cloud.logging_v2.handlers._monitored_resources.retrieve_metadata_server",
+ wraps=self._mock_metadata,
+ )
+ os.environ[_monitored_resources._CLOUD_RUN_SERVICE_ID] = self.NAME
+ os.environ[_monitored_resources._CLOUD_RUN_REVISION_ID] = self.VERSION
+ os.environ[_monitored_resources._CLOUD_RUN_CONFIGURATION_ID] = self.CONFIG
+ with patch:
+ resource = _create_cloud_run_resource(self.PROJECT)
+ self.assertIsInstance(resource, Resource)
+ self.assertEqual(resource.type, "cloud_run_revision")
+ self.assertEqual(resource.labels["project_id"], self.PROJECT)
+ self.assertEqual(resource.labels["service_name"], self.NAME)
+ self.assertEqual(resource.labels["revision_name"], self.VERSION)
+ self.assertEqual(resource.labels["configuration_name"], self.CONFIG)
+ self.assertEqual(resource.labels["location"], self.LOCATION)
+
+ def test_app_engine_resource(self):
+ patch = mock.patch(
+ "google.cloud.logging_v2.handlers._monitored_resources.retrieve_metadata_server",
+ wraps=self._mock_metadata,
+ )
+ os.environ[_monitored_resources._GAE_SERVICE_ENV] = self.NAME
+ os.environ[_monitored_resources._GAE_VERSION_ENV] = self.VERSION
+ with patch:
+ resource = _create_app_engine_resource(self.PROJECT)
+ self.assertIsInstance(resource, Resource)
+ self.assertEqual(resource.type, "gae_app")
+ self.assertEqual(resource.labels["project_id"], self.PROJECT)
+ self.assertEqual(resource.labels["module_id"], self.NAME)
+ self.assertEqual(resource.labels["version_id"], self.VERSION)
+ self.assertEqual(resource.labels["zone"], self.LOCATION)
+
+ def test_global_resource(self):
+ resource = _create_global_resource(self.PROJECT)
+ self.assertIsInstance(resource, Resource)
+ self.assertEqual(resource.type, "global")
+ self.assertEqual(resource.labels["project_id"], self.PROJECT)
+
+
+class Test_Resource_Detection(unittest.TestCase):
+
+ PROJECT = "test-project"
+
+ def _mock_k8s_metadata(self, endpoint):
+ if (
+ endpoint == _monitored_resources._GKE_CLUSTER_NAME
+ or endpoint == _monitored_resources._GCE_INSTANCE_ID
+ ):
+ return "TRUE"
+ else:
+ return None
+
+ def _mock_gce_metadata(self, endpoint):
+ if endpoint == _monitored_resources._GCE_INSTANCE_ID:
+ return "TRUE"
+ else:
+ return None
+
+ def setUp(self):
+ os.environ.clear()
+
+ def test_detect_appengine(self):
+ for env in _monitored_resources._GAE_ENV_VARS:
+ os.environ[env] = "TRUE"
+ resource = detect_resource(self.PROJECT)
+ self.assertIsInstance(resource, Resource)
+ self.assertEqual(resource.type, "gae_app")
+
+ def test_detect_kubernetes(self):
+ patch = mock.patch(
+ "google.cloud.logging_v2.handlers._monitored_resources.retrieve_metadata_server",
+ wraps=self._mock_k8s_metadata,
+ )
+ with patch:
+ resource = detect_resource(self.PROJECT)
+ self.assertIsInstance(resource, Resource)
+ self.assertEqual(resource.type, "k8s_container")
+
+ def test_detect_functions(self):
+ for env in _monitored_resources._FUNCTION_ENV_VARS:
+ os.environ[env] = "TRUE"
+ resource = detect_resource(self.PROJECT)
+ self.assertIsInstance(resource, Resource)
+ self.assertEqual(resource.type, "cloud_function")
+
+ def test_detect_legacy_functions(self):
+ for env in _monitored_resources._LEGACY_FUNCTION_ENV_VARS:
+ os.environ[env] = "TRUE"
+ resource = detect_resource(self.PROJECT)
+ self.assertIsInstance(resource, Resource)
+ self.assertEqual(resource.type, "cloud_function")
+
+ def test_detect_cloud_run(self):
+ for env in _monitored_resources._CLOUD_RUN_ENV_VARS:
+ os.environ[env] = "TRUE"
+ resource = detect_resource(self.PROJECT)
+ self.assertIsInstance(resource, Resource)
+ self.assertEqual(resource.type, "cloud_run_revision")
+
+ def test_detect_compute_engine(self):
+ patch = mock.patch(
+ "google.cloud.logging_v2.handlers._monitored_resources.retrieve_metadata_server",
+ wraps=self._mock_gce_metadata,
+ )
+ with patch:
+ resource = detect_resource(self.PROJECT)
+ self.assertIsInstance(resource, Resource)
+ self.assertEqual(resource.type, "gce_instance")
+
+ def test_detection_unknown(self):
+ patch = mock.patch(
+ "google.cloud.logging_v2.handlers._monitored_resources.retrieve_metadata_server",
+ return_value=None,
+ )
+ with patch:
+ resource = detect_resource(self.PROJECT)
+ self.assertIsInstance(resource, Resource)
+ self.assertEqual(resource.type, "global")
diff --git a/tests/unit/handlers/test_container_engine.py b/tests/unit/handlers/test_container_engine.py
index d2ae83881..280ab9cf0 100644
--- a/tests/unit/handlers/test_container_engine.py
+++ b/tests/unit/handlers/test_container_engine.py
@@ -40,7 +40,7 @@ def test_format(self):
handler = self._make_one()
logname = "loggername"
- message = "hello world"
+ message = "hello world,嗨 世界"
record = logging.LogRecord(
logname, logging.INFO, None, None, message, None, None
)
@@ -53,4 +53,4 @@ def test_format(self):
}
payload = handler.format(record)
- self.assertEqual(payload, json.dumps(expected_payload))
+ self.assertEqual(payload, json.dumps(expected_payload, ensure_ascii=False))
diff --git a/tests/unit/handlers/test_handlers.py b/tests/unit/handlers/test_handlers.py
index d84c19635..51e2f0703 100644
--- a/tests/unit/handlers/test_handlers.py
+++ b/tests/unit/handlers/test_handlers.py
@@ -14,6 +14,13 @@
import logging
import unittest
+from unittest.mock import patch
+import mock
+
+from google.cloud.logging_v2.handlers._monitored_resources import (
+ _FUNCTION_ENV_VARS,
+ _GAE_ENV_VARS,
+)
class TestCloudLoggingHandler(unittest.TestCase):
@@ -31,19 +38,27 @@ def _make_one(self, *args, **kw):
def test_ctor_defaults(self):
import sys
- from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE
+ from google.cloud.logging_v2.handlers._monitored_resources import (
+ _create_global_resource,
+ )
from google.cloud.logging_v2.handlers.handlers import DEFAULT_LOGGER_NAME
- client = _Client(self.PROJECT)
- handler = self._make_one(client, transport=_Transport)
- self.assertEqual(handler.name, DEFAULT_LOGGER_NAME)
- self.assertIs(handler.client, client)
- self.assertIsInstance(handler.transport, _Transport)
- self.assertIs(handler.transport.client, client)
- self.assertEqual(handler.transport.name, DEFAULT_LOGGER_NAME)
- self.assertIs(handler.resource, _GLOBAL_RESOURCE)
- self.assertIsNone(handler.labels)
- self.assertIs(handler.stream, sys.stderr)
+ patch = mock.patch(
+ "google.cloud.logging_v2.handlers._monitored_resources.retrieve_metadata_server",
+ return_value=None,
+ )
+ with patch:
+ client = _Client(self.PROJECT)
+ handler = self._make_one(client, transport=_Transport)
+ self.assertEqual(handler.name, DEFAULT_LOGGER_NAME)
+ self.assertIs(handler.client, client)
+ self.assertIsInstance(handler.transport, _Transport)
+ self.assertIs(handler.transport.client, client)
+ self.assertEqual(handler.transport.name, DEFAULT_LOGGER_NAME)
+ global_resource = _create_global_resource(self.PROJECT)
+ self.assertEqual(handler.resource, global_resource)
+ self.assertIsNone(handler.labels)
+ self.assertIs(handler.stream, sys.stderr)
def test_ctor_explicit(self):
import io
@@ -156,6 +171,49 @@ def test_setup_logging_excludes(self):
self.assertNotIn(handler, excluded_logger.handlers)
self.assertFalse(excluded_logger.propagate)
+ @patch.dict("os.environ", {envar: "1" for envar in _FUNCTION_ENV_VARS})
+ def test_remove_handlers_gcf(self):
+ logger = logging.getLogger()
+ # add fake handler
+ added_handler = logging.StreamHandler()
+ logger.addHandler(added_handler)
+
+ handler = _Handler(logging.INFO)
+ self._call_fut(handler)
+ self.assertNotIn(added_handler, logger.handlers)
+ # handler should be removed from logger
+ self.assertEqual(len(logger.handlers), 1)
+
+ @patch.dict("os.environ", {envar: "1" for envar in _GAE_ENV_VARS})
+ def test_remove_handlers_gae(self):
+ logger = logging.getLogger()
+ # add fake handler
+ added_handler = logging.StreamHandler()
+ logger.addHandler(added_handler)
+
+ handler = _Handler(logging.INFO)
+ self._call_fut(handler)
+ self.assertNotIn(added_handler, logger.handlers)
+ # handler should be removed from logger
+ self.assertEqual(len(logger.handlers), 1)
+
+ def test_keep_handlers_others(self):
+ # mock non-cloud environment
+ patch = mock.patch(
+ "google.cloud.logging_v2.handlers._monitored_resources.retrieve_metadata_server",
+ return_value=None,
+ )
+ with patch:
+ # add fake handler
+ added_handler = logging.StreamHandler()
+ logger = logging.getLogger()
+ logger.addHandler(added_handler)
+
+ handler = _Handler(logging.INFO)
+ self._call_fut(handler)
+ # added handler should remain in logger
+ self.assertIn(added_handler, logger.handlers)
+
def setUp(self):
self._handlers_cache = logging.getLogger().handlers[:]
diff --git a/tests/unit/test_client.py b/tests/unit/test_client.py
index 8083e3c56..f33f1cbdc 100644
--- a/tests/unit/test_client.py
+++ b/tests/unit/test_client.py
@@ -718,7 +718,7 @@ def test_list_metrics_with_paging(self):
def test_get_default_handler_app_engine(self):
import os
from google.cloud._testing import _Monkey
- from google.cloud.logging_v2.client import _APPENGINE_FLEXIBLE_ENV_VM
+ from google.cloud.logging_v2.handlers._monitored_resources import _GAE_ENV_VARS
from google.cloud.logging.handlers import AppEngineHandler
credentials = _make_credentials()
@@ -726,7 +726,9 @@ def test_get_default_handler_app_engine(self):
project=self.PROJECT, credentials=credentials, _use_grpc=False
)
- with _Monkey(os, environ={_APPENGINE_FLEXIBLE_ENV_VM: "True"}):
+ gae_env_vars = {var: "TRUE" for var in _GAE_ENV_VARS}
+
+ with _Monkey(os, environ=gae_env_vars):
handler = client.get_default_handler()
handler.transport.worker.stop()
@@ -742,7 +744,7 @@ def test_get_default_handler_container_engine(self):
)
patch = mock.patch(
- "google.cloud.logging_v2.client.retrieve_metadata_server",
+ "google.cloud.logging_v2.handlers._monitored_resources.retrieve_metadata_server",
return_value="test-gke-cluster",
)
diff --git a/tests/unit/test_logger.py b/tests/unit/test_logger.py
index 869330633..53ecac8f5 100644
--- a/tests/unit/test_logger.py
+++ b/tests/unit/test_logger.py
@@ -99,11 +99,15 @@ def test_batch_w_alternate_client(self):
self.assertIs(batch.client, client2)
def test_log_empty_defaults_w_default_labels(self):
+ from google.cloud.logging_v2.handlers._monitored_resources import (
+ detect_resource,
+ )
+
DEFAULT_LABELS = {"foo": "spam"}
ENTRIES = [
{
"logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME),
- "resource": {"type": "global", "labels": {}},
+ "resource": detect_resource(self.PROJECT)._to_dict(),
"labels": DEFAULT_LABELS,
}
]
@@ -170,12 +174,17 @@ def test_log_empty_w_explicit(self):
self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None))
def test_log_text_defaults(self):
+ from google.cloud.logging_v2.handlers._monitored_resources import (
+ detect_resource,
+ )
+
+ RESOURCE = detect_resource(self.PROJECT)._to_dict()
TEXT = "TEXT"
ENTRIES = [
{
"logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME),
"textPayload": TEXT,
- "resource": {"type": "global", "labels": {}},
+ "resource": RESOURCE,
}
]
client = _Client(self.PROJECT)
@@ -187,13 +196,18 @@ def test_log_text_defaults(self):
self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None))
def test_log_text_w_unicode_and_default_labels(self):
+ from google.cloud.logging_v2.handlers._monitored_resources import (
+ detect_resource,
+ )
+
TEXT = "TEXT"
+ RESOURCE = detect_resource(self.PROJECT)._to_dict()
DEFAULT_LABELS = {"foo": "spam"}
ENTRIES = [
{
"logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME),
"textPayload": TEXT,
- "resource": {"type": "global", "labels": {}},
+ "resource": RESOURCE,
"labels": DEFAULT_LABELS,
}
]
@@ -263,12 +277,17 @@ def test_log_text_explicit(self):
self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None))
def test_log_struct_defaults(self):
+ from google.cloud.logging_v2.handlers._monitored_resources import (
+ detect_resource,
+ )
+
STRUCT = {"message": "MESSAGE", "weather": "cloudy"}
+ RESOURCE = detect_resource(self.PROJECT)._to_dict()
ENTRIES = [
{
"logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME),
"jsonPayload": STRUCT,
- "resource": {"type": "global", "labels": {}},
+ "resource": RESOURCE,
}
]
client = _Client(self.PROJECT)
@@ -280,13 +299,18 @@ def test_log_struct_defaults(self):
self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None))
def test_log_struct_w_default_labels(self):
+ from google.cloud.logging_v2.handlers._monitored_resources import (
+ detect_resource,
+ )
+
STRUCT = {"message": "MESSAGE", "weather": "cloudy"}
+ RESOURCE = detect_resource(self.PROJECT)._to_dict()
DEFAULT_LABELS = {"foo": "spam"}
ENTRIES = [
{
"logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME),
"jsonPayload": STRUCT,
- "resource": {"type": "global", "labels": {}},
+ "resource": RESOURCE,
"labels": DEFAULT_LABELS,
}
]
@@ -359,13 +383,16 @@ def test_log_proto_defaults(self):
import json
from google.protobuf.json_format import MessageToJson
from google.protobuf.struct_pb2 import Struct, Value
+ from google.cloud.logging_v2.handlers._monitored_resources import (
+ detect_resource,
+ )
message = Struct(fields={"foo": Value(bool_value=True)})
ENTRIES = [
{
"logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME),
"protoPayload": json.loads(MessageToJson(message)),
- "resource": {"type": "global", "labels": {}},
+ "resource": detect_resource(self.PROJECT)._to_dict(),
}
]
client = _Client(self.PROJECT)
@@ -380,6 +407,9 @@ def test_log_proto_w_default_labels(self):
import json
from google.protobuf.json_format import MessageToJson
from google.protobuf.struct_pb2 import Struct, Value
+ from google.cloud.logging_v2.handlers._monitored_resources import (
+ detect_resource,
+ )
message = Struct(fields={"foo": Value(bool_value=True)})
DEFAULT_LABELS = {"foo": "spam"}
@@ -387,7 +417,7 @@ def test_log_proto_w_default_labels(self):
{
"logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME),
"protoPayload": json.loads(MessageToJson(message)),
- "resource": {"type": "global", "labels": {}},
+ "resource": detect_resource(self.PROJECT)._to_dict(),
"labels": DEFAULT_LABELS,
}
]