diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 13fc69ce9..c4e82889d 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,4 +1,4 @@ -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:5efdf8d38e5a22c1ec9e5541cbdfde56399bdffcb6f531183f84ac66052a8024 -# created: 2024-10-25 + digest: sha256:023a21377a2a00008057f99f0118edadc30a19d1636a3fee47189ebec2f3921c +# created: 2025-03-31T16:51:40.130756953Z diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 4ddae6fb2..a6a81621d 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -26,5 +26,5 @@ tests/test_identity_pool.py @googleapis/googleap tests/test_pluggable.py @googleapis/googleapis-auth @googleapis/aion-sdk tests/test_sts.py @googleapis/googleapis-auth @googleapis/aion-sdk tests/test_impersonated_credentials.py @googleapis/googleapis-auth @googleapis/aion-sdk -/samples/ @googleapis/python-samples-owners +/samples/ @googleapis/googleapis-auth @googleapis/aion-sdk @googleapis/python-samples-owners system_tests/secrets.tar.enc # Remove noise from test creds. diff --git a/.kokoro/build-systests.sh b/.kokoro/build-systests.sh index a2947c25a..3c8dcf85b 100755 --- a/.kokoro/build-systests.sh +++ b/.kokoro/build-systests.sh @@ -28,7 +28,7 @@ export PYTHONUNBUFFERED=1 python3 -m pip uninstall --yes --quiet nox-automation # Install nox -python3 -m pip install --upgrade --quiet nox +python3 -m pip install nox==2024.10.9 python3 -m nox --version # Setup service account credentials. diff --git a/.kokoro/docker/docs/Dockerfile b/.kokoro/docker/docs/Dockerfile deleted file mode 100644 index e5410e296..000000000 --- a/.kokoro/docker/docs/Dockerfile +++ /dev/null @@ -1,89 +0,0 @@ -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from ubuntu:24.04 - -ENV DEBIAN_FRONTEND noninteractive - -# Ensure local Python is preferred over distribution Python. -ENV PATH /usr/local/bin:$PATH - -# Install dependencies. 
-RUN apt-get update \ - && apt-get install -y --no-install-recommends \ - apt-transport-https \ - build-essential \ - ca-certificates \ - curl \ - dirmngr \ - git \ - gpg-agent \ - graphviz \ - libbz2-dev \ - libdb5.3-dev \ - libexpat1-dev \ - libffi-dev \ - liblzma-dev \ - libreadline-dev \ - libsnappy-dev \ - libssl-dev \ - libsqlite3-dev \ - portaudio19-dev \ - redis-server \ - software-properties-common \ - ssh \ - sudo \ - tcl \ - tcl-dev \ - tk \ - tk-dev \ - uuid-dev \ - wget \ - zlib1g-dev \ - && add-apt-repository universe \ - && apt-get update \ - && apt-get -y install jq \ - && apt-get clean autoclean \ - && apt-get autoremove -y \ - && rm -rf /var/lib/apt/lists/* \ - && rm -f /var/cache/apt/archives/*.deb - - -###################### Install python 3.10.14 for docs/docfx session - -# Download python 3.10.14 -RUN wget https://www.python.org/ftp/python/3.10.14/Python-3.10.14.tgz - -# Extract files -RUN tar -xvf Python-3.10.14.tgz - -# Install python 3.10.14 -RUN ./Python-3.10.14/configure --enable-optimizations -RUN make altinstall - -ENV PATH /usr/local/bin/python3.10:$PATH - -###################### Install pip -RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \ - && python3.10 /tmp/get-pip.py \ - && rm /tmp/get-pip.py - -# Test pip -RUN python3.10 -m pip - -# Install build requirements -COPY requirements.txt /requirements.txt -RUN python3.10 -m pip install --require-hashes -r requirements.txt - -CMD ["python3.10"] diff --git a/.kokoro/docker/docs/requirements.in b/.kokoro/docker/docs/requirements.in deleted file mode 100644 index 816817c67..000000000 --- a/.kokoro/docker/docs/requirements.in +++ /dev/null @@ -1 +0,0 @@ -nox diff --git a/.kokoro/docker/docs/requirements.txt b/.kokoro/docker/docs/requirements.txt deleted file mode 100644 index 66eacc82f..000000000 --- a/.kokoro/docker/docs/requirements.txt +++ /dev/null @@ -1,42 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.9 -# by the following command: -# -# pip-compile --allow-unsafe --generate-hashes requirements.in -# -argcomplete==3.5.1 \ - --hash=sha256:1a1d148bdaa3e3b93454900163403df41448a248af01b6e849edc5ac08e6c363 \ - --hash=sha256:eb1ee355aa2557bd3d0145de7b06b2a45b0ce461e1e7813f5d066039ab4177b4 - # via nox -colorlog==6.8.2 \ - --hash=sha256:3e3e079a41feb5a1b64f978b5ea4f46040a94f11f0e8bbb8261e3dbbeca64d44 \ - --hash=sha256:4dcbb62368e2800cb3c5abd348da7e53f6c362dda502ec27c560b2e58a66bd33 - # via nox -distlib==0.3.9 \ - --hash=sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87 \ - --hash=sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403 - # via virtualenv -filelock==3.16.1 \ - --hash=sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0 \ - --hash=sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435 - # via virtualenv -nox==2024.10.9 \ - --hash=sha256:1d36f309a0a2a853e9bccb76bbef6bb118ba92fa92674d15604ca99adeb29eab \ - --hash=sha256:7aa9dc8d1c27e9f45ab046ffd1c3b2c4f7c91755304769df231308849ebded95 - # via -r requirements.in -packaging==24.1 \ - --hash=sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002 \ - --hash=sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124 - # via nox -platformdirs==4.3.6 \ - --hash=sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907 \ - --hash=sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb - # via virtualenv -tomli==2.0.2 \ - 
--hash=sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38 \ - --hash=sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed - # via nox -virtualenv==20.26.6 \ - --hash=sha256:280aede09a2a5c317e409a00102e7077c6432c5a38f0ef938e643805a7ad2c48 \ - --hash=sha256:7345cc5b25405607a624d8418154577459c3e0277f5466dd79c49d5e492995f2 - # via nox diff --git a/.kokoro/docs/common.cfg b/.kokoro/docs/common.cfg deleted file mode 100644 index a1aa466c8..000000000 --- a/.kokoro/docs/common.cfg +++ /dev/null @@ -1,67 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. -build_file: "google-auth-library-python/.kokoro/trampoline_v2.sh" - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-lib-docs" -} -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/google-auth-library-python/.kokoro/publish-docs.sh" -} - -env_vars: { - key: "STAGING_BUCKET" - value: "docs-staging" -} - -env_vars: { - key: "V2_STAGING_BUCKET" - # Push non-cloud library docs to `docs-staging-v2-dev` instead of the - # Cloud RAD bucket `docs-staging-v2` - value: "docs-staging-v2-dev" -} - -# It will upload the docker image after successful builds. -env_vars: { - key: "TRAMPOLINE_IMAGE_UPLOAD" - value: "true" -} - -# It will always build the docker image. -env_vars: { - key: "TRAMPOLINE_DOCKERFILE" - value: ".kokoro/docker/docs/Dockerfile" -} - -# Fetch the token needed for reporting release status to GitHub -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "yoshi-automation-github-key" - } - } -} - -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "docuploader_service_account" - } - } -} diff --git a/.kokoro/docs/docs-presubmit.cfg b/.kokoro/docs/docs-presubmit.cfg deleted file mode 100644 index d3f0deae3..000000000 --- a/.kokoro/docs/docs-presubmit.cfg +++ /dev/null @@ -1,28 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "STAGING_BUCKET" - value: "gcloud-python-test" -} - -env_vars: { - key: "V2_STAGING_BUCKET" - value: "gcloud-python-test" -} - -# We only upload the image in the main `docs` build. -env_vars: { - key: "TRAMPOLINE_IMAGE_UPLOAD" - value: "false" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/google-auth-library-python/.kokoro/build.sh" -} - -# Only run this nox session. -env_vars: { - key: "NOX_SESSION" - value: "docs" -} diff --git a/.kokoro/docs/docs.cfg b/.kokoro/docs/docs.cfg deleted file mode 100644 index 8f43917d9..000000000 --- a/.kokoro/docs/docs.cfg +++ /dev/null @@ -1 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/.kokoro/publish-docs.sh b/.kokoro/publish-docs.sh deleted file mode 100755 index 233205d58..000000000 --- a/.kokoro/publish-docs.sh +++ /dev/null @@ -1,62 +0,0 @@ -#!/bin/bash -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -eo pipefail - -# Disable buffering, so that the logs stream through. -export PYTHONUNBUFFERED=1 - -export PATH="${HOME}/.local/bin:${PATH}" - -# Install nox -python3.10 -m pip install --require-hashes -r .kokoro/requirements.txt -python3.10 -m nox --version - -# build docs -nox -s docs - -# create metadata -python3.10 -m docuploader create-metadata \ - --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \ - --version=$(python3.10 setup.py --version) \ - --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \ - --distribution-name=$(python3.10 setup.py --name) \ - --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \ - --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \ - --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json) - -cat docs.metadata - -# upload docs -python3.10 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket "${STAGING_BUCKET}" - - -# docfx yaml files -nox -s docfx - -# create metadata. -python3.10 -m docuploader create-metadata \ - --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \ - --version=$(python3.10 setup.py --version) \ - --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \ - --distribution-name=$(python3.10 setup.py --name) \ - --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \ - --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \ - --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json) - -cat docs.metadata - -# upload docs -python3.10 -m docuploader upload docs/_build/html/docfx_yaml --metadata-file docs.metadata --destination-prefix docfx --staging-bucket "${V2_STAGING_BUCKET}" diff --git a/.kokoro/release.sh b/.kokoro/release.sh deleted file mode 100755 index dcc6d1fef..000000000 --- a/.kokoro/release.sh +++ /dev/null @@ -1,29 +0,0 @@ -#!/bin/bash -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -eo pipefail - -# Start the releasetool reporter -python3 -m pip install --require-hashes -r github/google-auth-library-python/.kokoro/requirements.txt -python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source /tmp/publisher-script - -# Disable buffering, so that the logs stream through. -export PYTHONUNBUFFERED=1 - -# Move into the package, build the distribution and upload. 
-TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google-cloud-pypi-token-keystore-3") -cd github/google-auth-library-python -python3 setup.py sdist bdist_wheel -twine upload --username __token__ --password "${TWINE_PASSWORD}" dist/* diff --git a/.kokoro/release/release.cfg b/.kokoro/release/release.cfg deleted file mode 100644 index 8f43917d9..000000000 --- a/.kokoro/release/release.cfg +++ /dev/null @@ -1 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/.kokoro/requirements.in b/.kokoro/requirements.in deleted file mode 100644 index fff4d9ce0..000000000 --- a/.kokoro/requirements.in +++ /dev/null @@ -1,11 +0,0 @@ -gcp-docuploader -gcp-releasetool>=2 # required for compatibility with cryptography>=42.x -importlib-metadata -typing-extensions -twine -wheel -setuptools -nox>=2022.11.21 # required to remove dependency on py -charset-normalizer<3 -click<8.1.0 -cryptography>=42.0.5 diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt deleted file mode 100644 index 006d8ef93..000000000 --- a/.kokoro/requirements.txt +++ /dev/null @@ -1,509 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.9 -# by the following command: -# -# pip-compile --allow-unsafe --generate-hashes requirements.in -# -argcomplete==3.5.1 \ - --hash=sha256:1a1d148bdaa3e3b93454900163403df41448a248af01b6e849edc5ac08e6c363 \ - --hash=sha256:eb1ee355aa2557bd3d0145de7b06b2a45b0ce461e1e7813f5d066039ab4177b4 - # via nox -attrs==24.2.0 \ - --hash=sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346 \ - --hash=sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2 - # via gcp-releasetool -backports-tarfile==1.2.0 \ - --hash=sha256:77e284d754527b01fb1e6fa8a1afe577858ebe4e9dad8919e34c862cb399bc34 \ - --hash=sha256:d75e02c268746e1b8144c278978b6e98e85de6ad16f8e4b0844a154557eca991 - # via jaraco-context -cachetools==5.5.0 \ - --hash=sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292 \ - --hash=sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a - # via google-auth -certifi==2024.8.30 \ - --hash=sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8 \ - --hash=sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9 - # via requests -cffi==1.17.1 \ - --hash=sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8 \ - --hash=sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2 \ - --hash=sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1 \ - --hash=sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15 \ - --hash=sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36 \ - --hash=sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824 \ - --hash=sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8 \ - --hash=sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36 \ - --hash=sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17 \ - --hash=sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf \ - --hash=sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc \ - --hash=sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3 \ - --hash=sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed \ - --hash=sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702 \ - 
--hash=sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1 \ - --hash=sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8 \ - --hash=sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903 \ - --hash=sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6 \ - --hash=sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d \ - --hash=sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b \ - --hash=sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e \ - --hash=sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be \ - --hash=sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c \ - --hash=sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683 \ - --hash=sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9 \ - --hash=sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c \ - --hash=sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8 \ - --hash=sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1 \ - --hash=sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4 \ - --hash=sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655 \ - --hash=sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67 \ - --hash=sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595 \ - --hash=sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0 \ - --hash=sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65 \ - --hash=sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41 \ - --hash=sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6 \ - --hash=sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401 \ - --hash=sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6 \ - --hash=sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3 \ - --hash=sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16 \ - --hash=sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93 \ - --hash=sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e \ - --hash=sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4 \ - --hash=sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964 \ - --hash=sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c \ - --hash=sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576 \ - --hash=sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0 \ - --hash=sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3 \ - --hash=sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662 \ - --hash=sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3 \ - --hash=sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff \ - --hash=sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5 \ - --hash=sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd \ - --hash=sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f \ - --hash=sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5 \ - --hash=sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14 \ - 
--hash=sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d \ - --hash=sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9 \ - --hash=sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7 \ - --hash=sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382 \ - --hash=sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a \ - --hash=sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e \ - --hash=sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a \ - --hash=sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4 \ - --hash=sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99 \ - --hash=sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87 \ - --hash=sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b - # via cryptography -charset-normalizer==2.1.1 \ - --hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845 \ - --hash=sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f - # via - # -r requirements.in - # requests -click==8.0.4 \ - --hash=sha256:6a7a62563bbfabfda3a38f3023a1db4a35978c0abd76f6c9605ecd6554d6d9b1 \ - --hash=sha256:8458d7b1287c5fb128c90e23381cf99dcde74beaf6c7ff6384ce84d6fe090adb - # via - # -r requirements.in - # gcp-docuploader - # gcp-releasetool -colorlog==6.8.2 \ - --hash=sha256:3e3e079a41feb5a1b64f978b5ea4f46040a94f11f0e8bbb8261e3dbbeca64d44 \ - --hash=sha256:4dcbb62368e2800cb3c5abd348da7e53f6c362dda502ec27c560b2e58a66bd33 - # via - # gcp-docuploader - # nox -cryptography==43.0.1 \ - --hash=sha256:014f58110f53237ace6a408b5beb6c427b64e084eb451ef25a28308270086494 \ - --hash=sha256:1bbcce1a551e262dfbafb6e6252f1ae36a248e615ca44ba302df077a846a8806 \ - --hash=sha256:203e92a75716d8cfb491dc47c79e17d0d9207ccffcbcb35f598fbe463ae3444d \ - --hash=sha256:27e613d7077ac613e399270253259d9d53872aaf657471473ebfc9a52935c062 \ - --hash=sha256:2bd51274dcd59f09dd952afb696bf9c61a7a49dfc764c04dd33ef7a6b502a1e2 \ - --hash=sha256:38926c50cff6f533f8a2dae3d7f19541432610d114a70808f0926d5aaa7121e4 \ - --hash=sha256:511f4273808ab590912a93ddb4e3914dfd8a388fed883361b02dea3791f292e1 \ - --hash=sha256:58d4e9129985185a06d849aa6df265bdd5a74ca6e1b736a77959b498e0505b85 \ - --hash=sha256:5b43d1ea6b378b54a1dc99dd8a2b5be47658fe9a7ce0a58ff0b55f4b43ef2b84 \ - --hash=sha256:61ec41068b7b74268fa86e3e9e12b9f0c21fcf65434571dbb13d954bceb08042 \ - --hash=sha256:666ae11966643886c2987b3b721899d250855718d6d9ce41b521252a17985f4d \ - --hash=sha256:68aaecc4178e90719e95298515979814bda0cbada1256a4485414860bd7ab962 \ - --hash=sha256:7c05650fe8023c5ed0d46793d4b7d7e6cd9c04e68eabe5b0aeea836e37bdcec2 \ - --hash=sha256:80eda8b3e173f0f247f711eef62be51b599b5d425c429b5d4ca6a05e9e856baa \ - --hash=sha256:8385d98f6a3bf8bb2d65a73e17ed87a3ba84f6991c155691c51112075f9ffc5d \ - --hash=sha256:88cce104c36870d70c49c7c8fd22885875d950d9ee6ab54df2745f83ba0dc365 \ - --hash=sha256:9d3cdb25fa98afdd3d0892d132b8d7139e2c087da1712041f6b762e4f807cc96 \ - --hash=sha256:a575913fb06e05e6b4b814d7f7468c2c660e8bb16d8d5a1faf9b33ccc569dd47 \ - --hash=sha256:ac119bb76b9faa00f48128b7f5679e1d8d437365c5d26f1c2c3f0da4ce1b553d \ - --hash=sha256:c1332724be35d23a854994ff0b66530119500b6053d0bd3363265f7e5e77288d \ - --hash=sha256:d03a475165f3134f773d1388aeb19c2d25ba88b6a9733c5c590b9ff7bbfa2e0c \ - --hash=sha256:d75601ad10b059ec832e78823b348bfa1a59f6b8d545db3a24fd44362a1564cb \ - 
--hash=sha256:de41fd81a41e53267cb020bb3a7212861da53a7d39f863585d13ea11049cf277 \ - --hash=sha256:e710bf40870f4db63c3d7d929aa9e09e4e7ee219e703f949ec4073b4294f6172 \ - --hash=sha256:ea25acb556320250756e53f9e20a4177515f012c9eaea17eb7587a8c4d8ae034 \ - --hash=sha256:f98bf604c82c416bc829e490c700ca1553eafdf2912a91e23a79d97d9801372a \ - --hash=sha256:fba1007b3ef89946dbbb515aeeb41e30203b004f0b4b00e5e16078b518563289 - # via - # -r requirements.in - # gcp-releasetool - # secretstorage -distlib==0.3.9 \ - --hash=sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87 \ - --hash=sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403 - # via virtualenv -docutils==0.21.2 \ - --hash=sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f \ - --hash=sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2 - # via readme-renderer -filelock==3.16.1 \ - --hash=sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0 \ - --hash=sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435 - # via virtualenv -gcp-docuploader==0.6.5 \ - --hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \ - --hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea - # via -r requirements.in -gcp-releasetool==2.1.1 \ - --hash=sha256:25639269f4eae510094f9dbed9894977e1966933211eb155a451deebc3fc0b30 \ - --hash=sha256:845f4ded3d9bfe8cc7fdaad789e83f4ea014affa77785259a7ddac4b243e099e - # via -r requirements.in -google-api-core==2.21.0 \ - --hash=sha256:4a152fd11a9f774ea606388d423b68aa7e6d6a0ffe4c8266f74979613ec09f81 \ - --hash=sha256:6869eacb2a37720380ba5898312af79a4d30b8bca1548fb4093e0697dc4bdf5d - # via - # google-cloud-core - # google-cloud-storage -google-auth==2.35.0 \ - --hash=sha256:25df55f327ef021de8be50bad0dfd4a916ad0de96da86cd05661c9297723ad3f \ - --hash=sha256:f4c64ed4e01e8e8b646ef34c018f8bf3338df0c8e37d8b3bba40e7f574a3278a - # via - # gcp-releasetool - # google-api-core - # google-cloud-core - # google-cloud-storage -google-cloud-core==2.4.1 \ - --hash=sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073 \ - --hash=sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61 - # via google-cloud-storage -google-cloud-storage==2.18.2 \ - --hash=sha256:97a4d45c368b7d401ed48c4fdfe86e1e1cb96401c9e199e419d289e2c0370166 \ - --hash=sha256:aaf7acd70cdad9f274d29332673fcab98708d0e1f4dceb5a5356aaef06af4d99 - # via gcp-docuploader -google-crc32c==1.6.0 \ - --hash=sha256:05e2d8c9a2f853ff116db9706b4a27350587f341eda835f46db3c0a8c8ce2f24 \ - --hash=sha256:18e311c64008f1f1379158158bb3f0c8d72635b9eb4f9545f8cf990c5668e59d \ - --hash=sha256:236c87a46cdf06384f614e9092b82c05f81bd34b80248021f729396a78e55d7e \ - --hash=sha256:35834855408429cecf495cac67ccbab802de269e948e27478b1e47dfb6465e57 \ - --hash=sha256:386122eeaaa76951a8196310432c5b0ef3b53590ef4c317ec7588ec554fec5d2 \ - --hash=sha256:40b05ab32a5067525670880eb5d169529089a26fe35dce8891127aeddc1950e8 \ - --hash=sha256:48abd62ca76a2cbe034542ed1b6aee851b6f28aaca4e6551b5599b6f3ef175cc \ - --hash=sha256:50cf2a96da226dcbff8671233ecf37bf6e95de98b2a2ebadbfdf455e6d05df42 \ - --hash=sha256:51c4f54dd8c6dfeb58d1df5e4f7f97df8abf17a36626a217f169893d1d7f3e9f \ - --hash=sha256:5bcc90b34df28a4b38653c36bb5ada35671ad105c99cfe915fb5bed7ad6924aa \ - --hash=sha256:62f6d4a29fea082ac4a3c9be5e415218255cf11684ac6ef5488eea0c9132689b \ - --hash=sha256:6eceb6ad197656a1ff49ebfbbfa870678c75be4344feb35ac1edf694309413dc \ - 
--hash=sha256:7aec8e88a3583515f9e0957fe4f5f6d8d4997e36d0f61624e70469771584c760 \ - --hash=sha256:91ca8145b060679ec9176e6de4f89b07363d6805bd4760631ef254905503598d \ - --hash=sha256:a184243544811e4a50d345838a883733461e67578959ac59964e43cca2c791e7 \ - --hash=sha256:a9e4b426c3702f3cd23b933436487eb34e01e00327fac20c9aebb68ccf34117d \ - --hash=sha256:bb0966e1c50d0ef5bc743312cc730b533491d60585a9a08f897274e57c3f70e0 \ - --hash=sha256:bb8b3c75bd157010459b15222c3fd30577042a7060e29d42dabce449c087f2b3 \ - --hash=sha256:bd5e7d2445d1a958c266bfa5d04c39932dc54093fa391736dbfdb0f1929c1fb3 \ - --hash=sha256:c87d98c7c4a69066fd31701c4e10d178a648c2cac3452e62c6b24dc51f9fcc00 \ - --hash=sha256:d2952396dc604544ea7476b33fe87faedc24d666fb0c2d5ac971a2b9576ab871 \ - --hash=sha256:d8797406499f28b5ef791f339594b0b5fdedf54e203b5066675c406ba69d705c \ - --hash=sha256:d9e9913f7bd69e093b81da4535ce27af842e7bf371cde42d1ae9e9bd382dc0e9 \ - --hash=sha256:e2806553238cd076f0a55bddab37a532b53580e699ed8e5606d0de1f856b5205 \ - --hash=sha256:ebab974b1687509e5c973b5c4b8b146683e101e102e17a86bd196ecaa4d099fc \ - --hash=sha256:ed767bf4ba90104c1216b68111613f0d5926fb3780660ea1198fc469af410e9d \ - --hash=sha256:f7a1fc29803712f80879b0806cb83ab24ce62fc8daf0569f2204a0cfd7f68ed4 - # via - # google-cloud-storage - # google-resumable-media -google-resumable-media==2.7.2 \ - --hash=sha256:3ce7551e9fe6d99e9a126101d2536612bb73486721951e9562fee0f90c6ababa \ - --hash=sha256:5280aed4629f2b60b847b0d42f9857fd4935c11af266744df33d8074cae92fe0 - # via google-cloud-storage -googleapis-common-protos==1.65.0 \ - --hash=sha256:2972e6c496f435b92590fd54045060867f3fe9be2c82ab148fc8885035479a63 \ - --hash=sha256:334a29d07cddc3aa01dee4988f9afd9b2916ee2ff49d6b757155dc0d197852c0 - # via google-api-core -idna==3.10 \ - --hash=sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9 \ - --hash=sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3 - # via requests -importlib-metadata==8.5.0 \ - --hash=sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b \ - --hash=sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7 - # via - # -r requirements.in - # keyring - # twine -jaraco-classes==3.4.0 \ - --hash=sha256:47a024b51d0239c0dd8c8540c6c7f484be3b8fcf0b2d85c13825780d3b3f3acd \ - --hash=sha256:f662826b6bed8cace05e7ff873ce0f9283b5c924470fe664fff1c2f00f581790 - # via keyring -jaraco-context==6.0.1 \ - --hash=sha256:9bae4ea555cf0b14938dc0aee7c9f32ed303aa20a3b73e7dc80111628792d1b3 \ - --hash=sha256:f797fc481b490edb305122c9181830a3a5b76d84ef6d1aef2fb9b47ab956f9e4 - # via keyring -jaraco-functools==4.1.0 \ - --hash=sha256:70f7e0e2ae076498e212562325e805204fc092d7b4c17e0e86c959e249701a9d \ - --hash=sha256:ad159f13428bc4acbf5541ad6dec511f91573b90fba04df61dafa2a1231cf649 - # via keyring -jeepney==0.8.0 \ - --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \ - --hash=sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755 - # via - # keyring - # secretstorage -jinja2==3.1.4 \ - --hash=sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369 \ - --hash=sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d - # via gcp-releasetool -keyring==25.4.1 \ - --hash=sha256:5426f817cf7f6f007ba5ec722b1bcad95a75b27d780343772ad76b17cb47b0bf \ - --hash=sha256:b07ebc55f3e8ed86ac81dd31ef14e81ace9dd9c3d4b5d77a6e9a2016d0d71a1b - # via - # gcp-releasetool - # twine -markdown-it-py==3.0.0 \ - 
--hash=sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1 \ - --hash=sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb - # via rich -markupsafe==3.0.1 \ - --hash=sha256:0778de17cff1acaeccc3ff30cd99a3fd5c50fc58ad3d6c0e0c4c58092b859396 \ - --hash=sha256:0f84af7e813784feb4d5e4ff7db633aba6c8ca64a833f61d8e4eade234ef0c38 \ - --hash=sha256:17b2aea42a7280db02ac644db1d634ad47dcc96faf38ab304fe26ba2680d359a \ - --hash=sha256:242d6860f1fd9191aef5fae22b51c5c19767f93fb9ead4d21924e0bcb17619d8 \ - --hash=sha256:244dbe463d5fb6d7ce161301a03a6fe744dac9072328ba9fc82289238582697b \ - --hash=sha256:26627785a54a947f6d7336ce5963569b5d75614619e75193bdb4e06e21d447ad \ - --hash=sha256:2a4b34a8d14649315c4bc26bbfa352663eb51d146e35eef231dd739d54a5430a \ - --hash=sha256:2ae99f31f47d849758a687102afdd05bd3d3ff7dbab0a8f1587981b58a76152a \ - --hash=sha256:312387403cd40699ab91d50735ea7a507b788091c416dd007eac54434aee51da \ - --hash=sha256:3341c043c37d78cc5ae6e3e305e988532b072329639007fd408a476642a89fd6 \ - --hash=sha256:33d1c36b90e570ba7785dacd1faaf091203d9942bc036118fab8110a401eb1a8 \ - --hash=sha256:3e683ee4f5d0fa2dde4db77ed8dd8a876686e3fc417655c2ece9a90576905344 \ - --hash=sha256:3ffb4a8e7d46ed96ae48805746755fadd0909fea2306f93d5d8233ba23dda12a \ - --hash=sha256:40621d60d0e58aa573b68ac5e2d6b20d44392878e0bfc159012a5787c4e35bc8 \ - --hash=sha256:40f1e10d51c92859765522cbd79c5c8989f40f0419614bcdc5015e7b6bf97fc5 \ - --hash=sha256:45d42d132cff577c92bfba536aefcfea7e26efb975bd455db4e6602f5c9f45e7 \ - --hash=sha256:48488d999ed50ba8d38c581d67e496f955821dc183883550a6fbc7f1aefdc170 \ - --hash=sha256:4935dd7883f1d50e2ffecca0aa33dc1946a94c8f3fdafb8df5c330e48f71b132 \ - --hash=sha256:4c2d64fdba74ad16138300815cfdc6ab2f4647e23ced81f59e940d7d4a1469d9 \ - --hash=sha256:4c8817557d0de9349109acb38b9dd570b03cc5014e8aabf1cbddc6e81005becd \ - --hash=sha256:4ffaaac913c3f7345579db4f33b0020db693f302ca5137f106060316761beea9 \ - --hash=sha256:5a4cb365cb49b750bdb60b846b0c0bc49ed62e59a76635095a179d440540c346 \ - --hash=sha256:62fada2c942702ef8952754abfc1a9f7658a4d5460fabe95ac7ec2cbe0d02abc \ - --hash=sha256:67c519635a4f64e495c50e3107d9b4075aec33634272b5db1cde839e07367589 \ - --hash=sha256:6a54c43d3ec4cf2a39f4387ad044221c66a376e58c0d0e971d47c475ba79c6b5 \ - --hash=sha256:7044312a928a66a4c2a22644147bc61a199c1709712069a344a3fb5cfcf16915 \ - --hash=sha256:730d86af59e0e43ce277bb83970530dd223bf7f2a838e086b50affa6ec5f9295 \ - --hash=sha256:800100d45176652ded796134277ecb13640c1a537cad3b8b53da45aa96330453 \ - --hash=sha256:80fcbf3add8790caddfab6764bde258b5d09aefbe9169c183f88a7410f0f6dea \ - --hash=sha256:82b5dba6eb1bcc29cc305a18a3c5365d2af06ee71b123216416f7e20d2a84e5b \ - --hash=sha256:852dc840f6d7c985603e60b5deaae1d89c56cb038b577f6b5b8c808c97580f1d \ - --hash=sha256:8ad4ad1429cd4f315f32ef263c1342166695fad76c100c5d979c45d5570ed58b \ - --hash=sha256:8ae369e84466aa70f3154ee23c1451fda10a8ee1b63923ce76667e3077f2b0c4 \ - --hash=sha256:93e8248d650e7e9d49e8251f883eed60ecbc0e8ffd6349e18550925e31bd029b \ - --hash=sha256:973a371a55ce9ed333a3a0f8e0bcfae9e0d637711534bcb11e130af2ab9334e7 \ - --hash=sha256:9ba25a71ebf05b9bb0e2ae99f8bc08a07ee8e98c612175087112656ca0f5c8bf \ - --hash=sha256:a10860e00ded1dd0a65b83e717af28845bb7bd16d8ace40fe5531491de76b79f \ - --hash=sha256:a4792d3b3a6dfafefdf8e937f14906a51bd27025a36f4b188728a73382231d91 \ - --hash=sha256:a7420ceda262dbb4b8d839a4ec63d61c261e4e77677ed7c66c99f4e7cb5030dd \ - --hash=sha256:ad91738f14eb8da0ff82f2acd0098b6257621410dcbd4df20aaa5b4233d75a50 \ - 
--hash=sha256:b6a387d61fe41cdf7ea95b38e9af11cfb1a63499af2759444b99185c4ab33f5b \ - --hash=sha256:b954093679d5750495725ea6f88409946d69cfb25ea7b4c846eef5044194f583 \ - --hash=sha256:bbde71a705f8e9e4c3e9e33db69341d040c827c7afa6789b14c6e16776074f5a \ - --hash=sha256:beeebf760a9c1f4c07ef6a53465e8cfa776ea6a2021eda0d0417ec41043fe984 \ - --hash=sha256:c91b394f7601438ff79a4b93d16be92f216adb57d813a78be4446fe0f6bc2d8c \ - --hash=sha256:c97ff7fedf56d86bae92fa0a646ce1a0ec7509a7578e1ed238731ba13aabcd1c \ - --hash=sha256:cb53e2a99df28eee3b5f4fea166020d3ef9116fdc5764bc5117486e6d1211b25 \ - --hash=sha256:cbf445eb5628981a80f54087f9acdbf84f9b7d862756110d172993b9a5ae81aa \ - --hash=sha256:d06b24c686a34c86c8c1fba923181eae6b10565e4d80bdd7bc1c8e2f11247aa4 \ - --hash=sha256:d98e66a24497637dd31ccab090b34392dddb1f2f811c4b4cd80c230205c074a3 \ - --hash=sha256:db15ce28e1e127a0013dfb8ac243a8e392db8c61eae113337536edb28bdc1f97 \ - --hash=sha256:db842712984e91707437461930e6011e60b39136c7331e971952bb30465bc1a1 \ - --hash=sha256:e24bfe89c6ac4c31792793ad9f861b8f6dc4546ac6dc8f1c9083c7c4f2b335cd \ - --hash=sha256:e81c52638315ff4ac1b533d427f50bc0afc746deb949210bc85f05d4f15fd772 \ - --hash=sha256:e9393357f19954248b00bed7c56f29a25c930593a77630c719653d51e7669c2a \ - --hash=sha256:ee3941769bd2522fe39222206f6dd97ae83c442a94c90f2b7a25d847d40f4729 \ - --hash=sha256:f31ae06f1328595d762c9a2bf29dafd8621c7d3adc130cbb46278079758779ca \ - --hash=sha256:f94190df587738280d544971500b9cafc9b950d32efcb1fba9ac10d84e6aa4e6 \ - --hash=sha256:fa7d686ed9883f3d664d39d5a8e74d3c5f63e603c2e3ff0abcba23eac6542635 \ - --hash=sha256:fb532dd9900381d2e8f48172ddc5a59db4c445a11b9fab40b3b786da40d3b56b \ - --hash=sha256:fe32482b37b4b00c7a52a07211b479653b7fe4f22b2e481b9a9b099d8a430f2f - # via jinja2 -mdurl==0.1.2 \ - --hash=sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8 \ - --hash=sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba - # via markdown-it-py -more-itertools==10.5.0 \ - --hash=sha256:037b0d3203ce90cca8ab1defbbdac29d5f993fc20131f3664dc8d6acfa872aef \ - --hash=sha256:5482bfef7849c25dc3c6dd53a6173ae4795da2a41a80faea6700d9f5846c5da6 - # via - # jaraco-classes - # jaraco-functools -nh3==0.2.18 \ - --hash=sha256:0411beb0589eacb6734f28d5497ca2ed379eafab8ad8c84b31bb5c34072b7164 \ - --hash=sha256:14c5a72e9fe82aea5fe3072116ad4661af5cf8e8ff8fc5ad3450f123e4925e86 \ - --hash=sha256:19aaba96e0f795bd0a6c56291495ff59364f4300d4a39b29a0abc9cb3774a84b \ - --hash=sha256:34c03fa78e328c691f982b7c03d4423bdfd7da69cd707fe572f544cf74ac23ad \ - --hash=sha256:36c95d4b70530b320b365659bb5034341316e6a9b30f0b25fa9c9eff4c27a204 \ - --hash=sha256:3a157ab149e591bb638a55c8c6bcb8cdb559c8b12c13a8affaba6cedfe51713a \ - --hash=sha256:42c64511469005058cd17cc1537578eac40ae9f7200bedcfd1fc1a05f4f8c200 \ - --hash=sha256:5f36b271dae35c465ef5e9090e1fdaba4a60a56f0bb0ba03e0932a66f28b9189 \ - --hash=sha256:6955369e4d9f48f41e3f238a9e60f9410645db7e07435e62c6a9ea6135a4907f \ - --hash=sha256:7b7c2a3c9eb1a827d42539aa64091640bd275b81e097cd1d8d82ef91ffa2e811 \ - --hash=sha256:8ce0f819d2f1933953fca255db2471ad58184a60508f03e6285e5114b6254844 \ - --hash=sha256:94a166927e53972a9698af9542ace4e38b9de50c34352b962f4d9a7d4c927af4 \ - --hash=sha256:a7f1b5b2c15866f2db413a3649a8fe4fd7b428ae58be2c0f6bca5eefd53ca2be \ - --hash=sha256:c8b3a1cebcba9b3669ed1a84cc65bf005728d2f0bc1ed2a6594a992e817f3a50 \ - --hash=sha256:de3ceed6e661954871d6cd78b410213bdcb136f79aafe22aa7182e028b8c7307 \ - --hash=sha256:f0eca9ca8628dbb4e916ae2491d72957fdd35f7a5d326b7032a345f111ac07fe - # via 
readme-renderer -nox==2024.10.9 \ - --hash=sha256:1d36f309a0a2a853e9bccb76bbef6bb118ba92fa92674d15604ca99adeb29eab \ - --hash=sha256:7aa9dc8d1c27e9f45ab046ffd1c3b2c4f7c91755304769df231308849ebded95 - # via -r requirements.in -packaging==24.1 \ - --hash=sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002 \ - --hash=sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124 - # via - # gcp-releasetool - # nox -pkginfo==1.10.0 \ - --hash=sha256:5df73835398d10db79f8eecd5cd86b1f6d29317589ea70796994d49399af6297 \ - --hash=sha256:889a6da2ed7ffc58ab5b900d888ddce90bce912f2d2de1dc1c26f4cb9fe65097 - # via twine -platformdirs==4.3.6 \ - --hash=sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907 \ - --hash=sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb - # via virtualenv -proto-plus==1.24.0 \ - --hash=sha256:30b72a5ecafe4406b0d339db35b56c4059064e69227b8c3bda7462397f966445 \ - --hash=sha256:402576830425e5f6ce4c2a6702400ac79897dab0b4343821aa5188b0fab81a12 - # via google-api-core -protobuf==5.28.2 \ - --hash=sha256:2c69461a7fcc8e24be697624c09a839976d82ae75062b11a0972e41fd2cd9132 \ - --hash=sha256:35cfcb15f213449af7ff6198d6eb5f739c37d7e4f1c09b5d0641babf2cc0c68f \ - --hash=sha256:52235802093bd8a2811abbe8bf0ab9c5f54cca0a751fdd3f6ac2a21438bffece \ - --hash=sha256:59379674ff119717404f7454647913787034f03fe7049cbef1d74a97bb4593f0 \ - --hash=sha256:5e8a95246d581eef20471b5d5ba010d55f66740942b95ba9b872d918c459452f \ - --hash=sha256:87317e9bcda04a32f2ee82089a204d3a2f0d3c8aeed16568c7daf4756e4f1fe0 \ - --hash=sha256:8ddc60bf374785fb7cb12510b267f59067fa10087325b8e1855b898a0d81d276 \ - --hash=sha256:a8b9403fc70764b08d2f593ce44f1d2920c5077bf7d311fefec999f8c40f78b7 \ - --hash=sha256:c0ea0123dac3399a2eeb1a1443d82b7afc9ff40241433296769f7da42d142ec3 \ - --hash=sha256:ca53faf29896c526863366a52a8f4d88e69cd04ec9571ed6082fa117fac3ab36 \ - --hash=sha256:eeea10f3dc0ac7e6b4933d32db20662902b4ab81bf28df12218aa389e9c2102d - # via - # gcp-docuploader - # gcp-releasetool - # google-api-core - # googleapis-common-protos - # proto-plus -pyasn1==0.6.1 \ - --hash=sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629 \ - --hash=sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034 - # via - # pyasn1-modules - # rsa -pyasn1-modules==0.4.1 \ - --hash=sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd \ - --hash=sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c - # via google-auth -pycparser==2.22 \ - --hash=sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6 \ - --hash=sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc - # via cffi -pygments==2.18.0 \ - --hash=sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199 \ - --hash=sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a - # via - # readme-renderer - # rich -pyjwt==2.9.0 \ - --hash=sha256:3b02fb0f44517787776cf48f2ae25d8e14f300e6d7545a4315cee571a415e850 \ - --hash=sha256:7e1e5b56cc735432a7369cbfa0efe50fa113ebecdc04ae6922deba8b84582d0c - # via gcp-releasetool -pyperclip==1.9.0 \ - --hash=sha256:b7de0142ddc81bfc5c7507eea19da920b92252b548b96186caf94a5e2527d310 - # via gcp-releasetool -python-dateutil==2.9.0.post0 \ - --hash=sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3 \ - --hash=sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427 - # via gcp-releasetool -readme-renderer==44.0 \ - 
--hash=sha256:2fbca89b81a08526aadf1357a8c2ae889ec05fb03f5da67f9769c9a592166151 \ - --hash=sha256:8712034eabbfa6805cacf1402b4eeb2a73028f72d1166d6f5cb7f9c047c5d1e1 - # via twine -requests==2.32.3 \ - --hash=sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760 \ - --hash=sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6 - # via - # gcp-releasetool - # google-api-core - # google-cloud-storage - # requests-toolbelt - # twine -requests-toolbelt==1.0.0 \ - --hash=sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6 \ - --hash=sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06 - # via twine -rfc3986==2.0.0 \ - --hash=sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd \ - --hash=sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c - # via twine -rich==13.9.2 \ - --hash=sha256:51a2c62057461aaf7152b4d611168f93a9fc73068f8ded2790f29fe2b5366d0c \ - --hash=sha256:8c82a3d3f8dcfe9e734771313e606b39d8247bb6b826e196f4914b333b743cf1 - # via twine -rsa==4.9 \ - --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ - --hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21 - # via google-auth -secretstorage==3.3.3 \ - --hash=sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77 \ - --hash=sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99 - # via keyring -six==1.16.0 \ - --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ - --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 - # via - # gcp-docuploader - # python-dateutil -tomli==2.0.2 \ - --hash=sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38 \ - --hash=sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed - # via nox -twine==5.1.1 \ - --hash=sha256:215dbe7b4b94c2c50a7315c0275d2258399280fbb7d04182c7e55e24b5f93997 \ - --hash=sha256:9aa0825139c02b3434d913545c7b847a21c835e11597f5255842d457da2322db - # via -r requirements.in -typing-extensions==4.12.2 \ - --hash=sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d \ - --hash=sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8 - # via - # -r requirements.in - # rich -urllib3==2.2.3 \ - --hash=sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac \ - --hash=sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9 - # via - # requests - # twine -virtualenv==20.26.6 \ - --hash=sha256:280aede09a2a5c317e409a00102e7077c6432c5a38f0ef938e643805a7ad2c48 \ - --hash=sha256:7345cc5b25405607a624d8418154577459c3e0277f5466dd79c49d5e492995f2 - # via nox -wheel==0.44.0 \ - --hash=sha256:2376a90c98cc337d18623527a97c31797bd02bad0033d41547043a1cbfbe448f \ - --hash=sha256:a29c3f2817e95ab89aa4660681ad547c0e9547f20e75b0562fe7723c9a2a9d49 - # via -r requirements.in -zipp==3.20.2 \ - --hash=sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350 \ - --hash=sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29 - # via importlib-metadata - -# The following packages are considered to be unsafe in a requirements file: -setuptools==75.1.0 \ - --hash=sha256:35ab7fd3bcd95e6b7fd704e4a1539513edad446c097797f2985e0e4b960772f2 \ - --hash=sha256:d59a21b17a275fb872a9c3dae73963160ae079f1049ed956880cd7c09b120538 - # via -r requirements.in diff --git a/.kokoro/release/common.cfg b/.kokoro/samples/python3.13/common.cfg similarity index 51% rename from 
.kokoro/release/common.cfg rename to .kokoro/samples/python3.13/common.cfg index 43bec962c..c1dd4316b 100644 --- a/.kokoro/release/common.cfg +++ b/.kokoro/samples/python3.13/common.cfg @@ -7,9 +7,18 @@ action { } } +# Specify which tests to run +env_vars: { + key: "RUN_TESTS_SESSION" + value: "unit-3.13" +} + # Download trampoline resources. gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" +# Download resources for system tests (service account key, etc.) +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-auth-library-python" + # Use the trampoline script to run in docker. build_file: "google-auth-library-python/.kokoro/trampoline.sh" @@ -20,24 +29,9 @@ env_vars: { } env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/google-auth-library-python/.kokoro/release.sh" -} - -# Fetch PyPI password -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "google-cloud-pypi-token-keystore-3" - } - } -} - -# Store the packages we uploaded to PyPI. That way, we have a record of exactly -# what we published, which we can use to generate SBOMs and attestations. -action { - define_artifacts { - regex: "github/google-auth-library-python/**/*.tar.gz" - strip_prefix: "github/google-auth-library-python" - } + value: "github/google-auth-library-python/.kokoro/build.sh" } +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/google-auth-library-python/.kokoro/samples-test-setup.sh" +} \ No newline at end of file diff --git a/.kokoro/samples/python3.13/continuous.cfg b/.kokoro/samples/python3.13/continuous.cfg new file mode 100644 index 000000000..a1c8d9759 --- /dev/null +++ b/.kokoro/samples/python3.13/continuous.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/.kokoro/samples/python3.13/periodic-head.cfg b/.kokoro/samples/python3.13/periodic-head.cfg new file mode 100644 index 000000000..83eace873 --- /dev/null +++ b/.kokoro/samples/python3.13/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/google-auth-library-python/.kokoro/test-samples-against-head.sh" +} diff --git a/.kokoro/samples/python3.13/periodic.cfg b/.kokoro/samples/python3.13/periodic.cfg new file mode 100644 index 000000000..71cd1e597 --- /dev/null +++ b/.kokoro/samples/python3.13/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "False" +} diff --git a/.kokoro/samples/python3.13/presubmit.cfg b/.kokoro/samples/python3.13/presubmit.cfg new file mode 100644 index 000000000..a1c8d9759 --- /dev/null +++ b/.kokoro/samples/python3.13/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/.kokoro/test-samples-impl.sh b/.kokoro/test-samples-impl.sh index 55910c8ba..53e365bc4 100755 --- a/.kokoro/test-samples-impl.sh +++ b/.kokoro/test-samples-impl.sh @@ -33,7 +33,8 @@ export PYTHONUNBUFFERED=1 env | grep KOKORO # Install nox -python3.9 -m pip install --upgrade --quiet nox +# `virtualenv==20.26.6` is added for Python 3.7 compatibility +python3.9 -m pip install --upgrade --quiet nox virtualenv==20.26.6 # Use secrets acessor 
service account to get secrets if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then diff --git a/CHANGELOG.md b/CHANGELOG.md index cb6a41358..a9880be63 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,57 @@ [1]: https://pypi.org/project/google-auth/#history +## [2.40.3](https://github.com/googleapis/google-auth-library-python/compare/v2.40.2...v2.40.3) (2025-06-04) + + +### Bug Fixes + +* Auth fetch token from default endpoint ([#1779](https://github.com/googleapis/google-auth-library-python/issues/1779)) ([88891cc](https://github.com/googleapis/google-auth-library-python/commit/88891cc596640b0bb3a2891532e2d32f2c9f0ec3)) +* Remove unnecessary call to mds service ([#1769](https://github.com/googleapis/google-auth-library-python/issues/1769)) ([7c61c7d](https://github.com/googleapis/google-auth-library-python/commit/7c61c7d0a42ceec3eab693065745a74f524acab0)) +* Retry 504 errors ([#1767](https://github.com/googleapis/google-auth-library-python/issues/1767)) ([554f967](https://github.com/googleapis/google-auth-library-python/commit/554f967620da2b02e5d44ac7463dcc2407ace5dd)) + +## [2.40.2](https://github.com/googleapis/google-auth-library-python/compare/v2.40.1...v2.40.2) (2025-05-21) + + +### Bug Fixes + +* Remove sync response logs in AuthorizedSession ([97ed1c8](https://github.com/googleapis/google-auth-library-python/commit/97ed1c8ef1a797af26c5639b618aa26360e9d868)) +* Update test to consider new error message from cryptography ([#1765](https://github.com/googleapis/google-auth-library-python/issues/1765)) ([44e38b6](https://github.com/googleapis/google-auth-library-python/commit/44e38b60002f9dbd524b1fe82fa8d4295afc68bc)) + +## [2.40.1](https://github.com/googleapis/google-auth-library-python/compare/v2.40.0...v2.40.1) (2025-05-06) + + +### Bug Fixes + +* Disable logging response body for async logs ([#1756](https://github.com/googleapis/google-auth-library-python/issues/1756)) ([2f0ddfe](https://github.com/googleapis/google-auth-library-python/commit/2f0ddfeb9f6c726c68beebd7eefd32c86f7f0963)) + +## [2.40.0](https://github.com/googleapis/google-auth-library-python/compare/v2.39.0...v2.40.0) (2025-04-29) + + +### Features + +* Add request response logging to auth ([#1678](https://github.com/googleapis/google-auth-library-python/issues/1678)) ([77ad53e](https://github.com/googleapis/google-auth-library-python/commit/77ad53eb00c74b3badc486c8207a16dbc49f37e5)) + + +### Bug Fixes + +* Correct webauthn JSON parsing to be compliant with standard. 
([#1658](https://github.com/googleapis/google-auth-library-python/issues/1658)) ([0c5ef36](https://github.com/googleapis/google-auth-library-python/commit/0c5ef364fb13ca9d7d17100166de87732d752de8)) + +## [2.39.0](https://github.com/googleapis/google-auth-library-python/compare/v2.38.0...v2.39.0) (2025-04-14) + + +### Features + +* Adds GA support for X.509 workload identity federation ([#1695](https://github.com/googleapis/google-auth-library-python/issues/1695)) ([7495960](https://github.com/googleapis/google-auth-library-python/commit/74959605400f9a1976bbdc52c029943b634eb553)) + + +### Bug Fixes + +* Add impersonated SA via local ADC support for fetch_id_token ([#1740](https://github.com/googleapis/google-auth-library-python/issues/1740)) ([f249764](https://github.com/googleapis/google-auth-library-python/commit/f24976452d741de6a49d9b7a85cdab47812f5312)) +* Add missing packaging dependency for feature requiring urllib3 ([#1732](https://github.com/googleapis/google-auth-library-python/issues/1732)) ([221f4a8](https://github.com/googleapis/google-auth-library-python/commit/221f4a82fa25c1ad453b85bc8b7f2fc304724879)) +* Add request timeout for MDS requests ([#1699](https://github.com/googleapis/google-auth-library-python/issues/1699)) ([9f7d3fa](https://github.com/googleapis/google-auth-library-python/commit/9f7d3fa92c0e656a1c970182833abe2d0d3ad3ee)) +* Explicitly declare support for Python 3.13 ([#1741](https://github.com/googleapis/google-auth-library-python/issues/1741)) ([6fd04d5](https://github.com/googleapis/google-auth-library-python/commit/6fd04d57df90866f24b554c489f8f2653467d70e)) + ## [2.38.0](https://github.com/googleapis/google-auth-library-python/compare/v2.37.0...v2.38.0) (2025-01-23) diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 68d598c93..3975a3d2a 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -19,7 +19,7 @@ A few notes on making changes to ``google-auth-library-python``. using ``nox -s docs``. - The change must work fully on the following CPython versions: - 3.7, 3.8, 3.9, 3.10, 3.11 and 3.12 across macOS, Linux, and Windows. + 3.7, 3.8, 3.9, 3.10, 3.11, 3.12 and 3.13 across macOS, Linux, and Windows. - The codebase *must* have 100% test statement coverage after each commit. You can test coverage via ``nox -e cover``. 
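Note on the CI hunks above: `.kokoro/build-systests.sh` pins `nox==2024.10.9`, and the new `python3.13` sample configs select the `unit-3.13` nox session via `RUN_TESTS_SESSION` (CONTRIBUTING.rst now lists 3.13 as supported). A rough local equivalent, assuming the repository's standard noxfile session naming, which is not shown in this diff:

    # Reproduce the pinned toolchain and run the 3.13 unit session locally.
    python3 -m pip install nox==2024.10.9
    python3 -m nox -s unit-3.13
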
diff --git a/google/auth/_default.py b/google/auth/_default.py index 1234fb25d..cf0cdd772 100644 --- a/google/auth/_default.py +++ b/google/auth/_default.py @@ -484,42 +484,8 @@ def _get_impersonated_service_account_credentials(filename, info, scopes): from google.auth import impersonated_credentials try: - source_credentials_info = info.get("source_credentials") - source_credentials_type = source_credentials_info.get("type") - if source_credentials_type == _AUTHORIZED_USER_TYPE: - source_credentials, _ = _get_authorized_user_credentials( - filename, source_credentials_info - ) - elif source_credentials_type == _SERVICE_ACCOUNT_TYPE: - source_credentials, _ = _get_service_account_credentials( - filename, source_credentials_info - ) - elif source_credentials_type == _EXTERNAL_ACCOUNT_AUTHORIZED_USER_TYPE: - source_credentials, _ = _get_external_account_authorized_user_credentials( - filename, source_credentials_info - ) - else: - raise exceptions.InvalidType( - "source credential of type {} is not supported.".format( - source_credentials_type - ) - ) - impersonation_url = info.get("service_account_impersonation_url") - start_index = impersonation_url.rfind("/") - end_index = impersonation_url.find(":generateAccessToken") - if start_index == -1 or end_index == -1 or start_index > end_index: - raise exceptions.InvalidValue( - "Cannot extract target principal from {}".format(impersonation_url) - ) - target_principal = impersonation_url[start_index + 1 : end_index] - delegates = info.get("delegates") - quota_project_id = info.get("quota_project_id") - credentials = impersonated_credentials.Credentials( - source_credentials, - target_principal, - scopes, - delegates, - quota_project_id=quota_project_id, + credentials = impersonated_credentials.Credentials.from_impersonated_service_account_info( + info, scopes=scopes ) except ValueError as caught_exc: msg = "Failed to load impersonated service account credentials from {}".format( diff --git a/google/auth/_helpers.py b/google/auth/_helpers.py index a6c07f7d8..78fe22f72 100644 --- a/google/auth/_helpers.py +++ b/google/auth/_helpers.py @@ -18,15 +18,38 @@ import calendar import datetime from email.message import Message +import hashlib +import json +import logging import sys +from typing import Any, Dict, Mapping, Optional, Union import urllib from google.auth import exceptions + +# _BASE_LOGGER_NAME is the base logger for all google-based loggers. +_BASE_LOGGER_NAME = "google" + +# _LOGGING_INITIALIZED ensures that base logger is only configured once +# (unless already configured by the end-user). +_LOGGING_INITIALIZED = False + + # The smallest MDS cache used by this library stores tokens until 4 minutes from # expiry. REFRESH_THRESHOLD = datetime.timedelta(minutes=3, seconds=45) +# TODO(https://github.com/googleapis/google-auth-library-python/issues/1684): Audit and update the list below. +_SENSITIVE_FIELDS = { + "accessToken", + "access_token", + "id_token", + "client_id", + "refresh_token", + "client_secret", +} + def copy_docstring(source_class): """Decorator that copies a method's docstring from another class. @@ -271,3 +294,220 @@ def is_python_3(): bool: True if the Python interpreter is Python 3 and False otherwise. """ return sys.version_info > (3, 0) + + +def _hash_sensitive_info(data: Union[dict, list]) -> Union[dict, list, str]: + """ + Hashes sensitive information within a dictionary. + + Args: + data: The dictionary containing data to be processed. 
+ + Returns: + A new dictionary with sensitive values replaced by their SHA512 hashes. + If the input is a list, returns a list with each element recursively processed. + If the input is neither a dict nor a list, returns the type of the input as a string. + + """ + if isinstance(data, dict): + hashed_data: Dict[Any, Union[Optional[str], dict, list]] = {} + for key, value in data.items(): + if key in _SENSITIVE_FIELDS and not isinstance(value, (dict, list)): + hashed_data[key] = _hash_value(value, key) + elif isinstance(value, (dict, list)): + hashed_data[key] = _hash_sensitive_info(value) + else: + hashed_data[key] = value + return hashed_data + elif isinstance(data, list): + hashed_list = [] + for val in data: + hashed_list.append(_hash_sensitive_info(val)) + return hashed_list + else: + # TODO(https://github.com/googleapis/google-auth-library-python/issues/1701): + # Investigate and hash sensitive info before logging when the data type is + # not a dict or a list. + return str(type(data)) + + +def _hash_value(value, field_name: str) -> Optional[str]: + """Hashes a value and returns a formatted hash string.""" + if value is None: + return None + encoded_value = str(value).encode("utf-8") + hash_object = hashlib.sha512() + hash_object.update(encoded_value) + hex_digest = hash_object.hexdigest() + return f"hashed_{field_name}-{hex_digest}" + + +def _logger_configured(logger: logging.Logger) -> bool: + """Determines whether `logger` has non-default configuration + + Args: + logger: The logger to check. + + Returns: + bool: Whether the logger has any non-default configuration. + """ + return ( + logger.handlers != [] or logger.level != logging.NOTSET or not logger.propagate + ) + + +def is_logging_enabled(logger: logging.Logger) -> bool: + """ + Checks if debug logging is enabled for the given logger. + + Args: + logger: The logging.Logger instance to check. + + Returns: + True if debug logging is enabled, False otherwise. + """ + # NOTE: Log propagation to the root logger is disabled unless + # the base logger i.e. logging.getLogger("google") is + # explicitly configured by the end user. Ideally this + # needs to happen in the client layer (already does for GAPICs). + # However, this is implemented here to avoid logging + # (if a root logger is configured) when a version of google-auth + # which supports logging is used with: + # - an older version of a GAPIC which does not support logging. + # - Apiary client which does not support logging. + global _LOGGING_INITIALIZED + if not _LOGGING_INITIALIZED: + base_logger = logging.getLogger(_BASE_LOGGER_NAME) + if not _logger_configured(base_logger): + base_logger.propagate = False + _LOGGING_INITIALIZED = True + + return logger.isEnabledFor(logging.DEBUG) + + +def request_log( + logger: logging.Logger, + method: str, + url: str, + body: Optional[bytes], + headers: Optional[Mapping[str, str]], +) -> None: + """ + Logs an HTTP request at the DEBUG level if logging is enabled. + + Args: + logger: The logging.Logger instance to use. + method: The HTTP method (e.g., "GET", "POST"). + url: The URL of the request. + body: The request body (can be None). + headers: The request headers (can be None). 
+ """ + if is_logging_enabled(logger): + content_type = ( + headers["Content-Type"] if headers and "Content-Type" in headers else "" + ) + json_body = _parse_request_body(body, content_type=content_type) + logged_body = _hash_sensitive_info(json_body) + logger.debug( + "Making request...", + extra={ + "httpRequest": { + "method": method, + "url": url, + "body": logged_body, + "headers": headers, + } + }, + ) + + +def _parse_request_body(body: Optional[bytes], content_type: str = "") -> Any: + """ + Parses a request body, handling bytes and string types, and different content types. + + Args: + body (Optional[bytes]): The request body. + content_type (str): The content type of the request body, e.g., "application/json", + "application/x-www-form-urlencoded", or "text/plain". If empty, attempts + to parse as JSON. + + Returns: + Parsed body (dict, str, or None). + - JSON: Decodes if content_type is "application/json" or None (fallback). + - URL-encoded: Parses if content_type is "application/x-www-form-urlencoded". + - Plain text: Returns string if content_type is "text/plain". + - None: Returns if body is None, UTF-8 decode fails, or content_type is unknown. + """ + if body is None: + return None + try: + body_str = body.decode("utf-8") + except (UnicodeDecodeError, AttributeError): + return None + content_type = content_type.lower() + if not content_type or "application/json" in content_type: + try: + return json.loads(body_str) + except (json.JSONDecodeError, TypeError): + return body_str + if "application/x-www-form-urlencoded" in content_type: + parsed_query = urllib.parse.parse_qs(body_str) + result = {k: v[0] for k, v in parsed_query.items()} + return result + if "text/plain" in content_type: + return body_str + return None + + +def _parse_response(response: Any) -> Any: + """ + Parses a response, attempting to decode JSON. + + Args: + response: The response object to parse. This can be any type, but + it is expected to have a `json()` method if it contains JSON. + + Returns: + The parsed response. If the response contains valid JSON, the + decoded JSON object (e.g., a dictionary or list) is returned. + If the response does not have a `json()` method or if the JSON + decoding fails, None is returned. + """ + try: + json_response = response.json() + return json_response + except Exception: + # TODO(https://github.com/googleapis/google-auth-library-python/issues/1744): + # Parse and return response payload as json based on different content types. + return None + + +def _response_log_base(logger: logging.Logger, parsed_response: Any) -> None: + """ + Logs a parsed HTTP response at the DEBUG level. + + This internal helper function takes a parsed response and logs it + using the provided logger. It also applies a hashing function to + potentially sensitive information before logging. + + Args: + logger: The logging.Logger instance to use for logging. + parsed_response: The parsed HTTP response object (e.g., a dictionary, + list, or the original response if parsing failed). + """ + + logged_response = _hash_sensitive_info(parsed_response) + logger.debug("Response received...", extra={"httpResponse": logged_response}) + + +def response_log(logger: logging.Logger, response: Any) -> None: + """ + Logs an HTTP response at the DEBUG level if logging is enabled. + + Args: + logger: The logging.Logger instance to use. + response: The HTTP response object to log. 
+ """ + if is_logging_enabled(logger): + json_response = _parse_response(response) + _response_log_base(logger, json_response) diff --git a/google/auth/aio/_helpers.py b/google/auth/aio/_helpers.py new file mode 100644 index 000000000..1d5a4618c --- /dev/null +++ b/google/auth/aio/_helpers.py @@ -0,0 +1,62 @@ +# Copyright 2025 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Helper functions for commonly used utilities.""" + + +import logging +from typing import Any + +from google.auth import _helpers + + +async def _parse_response_async(response: Any) -> Any: + """ + Parses an async response, attempting to decode JSON. + + Args: + response: The response object to parse. This can be any type, but + it is expected to have a `json()` method if it contains JSON. + + Returns: + The parsed response. If the response contains valid JSON, the + decoded JSON object (e.g., a dictionary) is returned. + If the response does not have a `json()` method or if the JSON + decoding fails, None is returned. + """ + try: + json_response = await response.json() + return json_response + except Exception: + # TODO(https://github.com/googleapis/google-auth-library-python/issues/1745): + # Parse and return response payload as json based on different content types. + return None + + +async def response_log_async(logger: logging.Logger, response: Any) -> None: + """ + Logs an Async HTTP response at the DEBUG level if logging is enabled. + + Args: + logger: The logging.Logger instance to use. + response: The HTTP response object to log. + """ + if _helpers.is_logging_enabled(logger): + # TODO(https://github.com/googleapis/google-auth-library-python/issues/1755): + # Parsing the response for async streaming logging results in + # the stream to be empty downstream. For now, we will not be logging + # the response for async responses until we investigate further. 
+ # json_response = await _parse_response_async(response) + json_response = None + _helpers._response_log_base(logger, json_response) diff --git a/google/auth/aio/transport/aiohttp.py b/google/auth/aio/transport/aiohttp.py index 074d1491c..67a19f952 100644 --- a/google/auth/aio/transport/aiohttp.py +++ b/google/auth/aio/transport/aiohttp.py @@ -16,6 +16,7 @@ """ import asyncio +import logging from typing import AsyncGenerator, Mapping, Optional try: @@ -27,8 +28,11 @@ from google.auth import _helpers from google.auth import exceptions +from google.auth.aio import _helpers as _helpers_async from google.auth.aio import transport +_LOGGER = logging.getLogger(__name__) + class Response(transport.Response): """ @@ -155,6 +159,7 @@ async def __call__( self._session = aiohttp.ClientSession() client_timeout = aiohttp.ClientTimeout(total=timeout) + _helpers.request_log(_LOGGER, method, url, body, headers) response = await self._session.request( method, url, @@ -163,6 +168,7 @@ timeout=client_timeout, **kwargs, ) + await _helpers_async.response_log_async(_LOGGER, response) return Response(response) except aiohttp.ClientError as caught_exc: diff --git a/google/auth/compute_engine/_metadata.py b/google/auth/compute_engine/_metadata.py index 06f99de0e..ddbe8ac2f 100644 --- a/google/auth/compute_engine/_metadata.py +++ b/google/auth/compute_engine/_metadata.py @@ -159,6 +159,7 @@ def get( retry_count=5, headers=None, return_none_for_not_found_error=False, + timeout=_METADATA_DEFAULT_TIMEOUT, ): """Fetch a resource from the metadata server. @@ -178,6 +179,7 @@ headers (Optional[Mapping[str, str]]): Headers for the request. return_none_for_not_found_error (Optional[bool]): If True, returns None for 404 error instead of throwing an exception. + timeout (int): How long to wait, in seconds, for the metadata server to respond. Returns: Union[Mapping, str]: If the metadata server returns JSON, a mapping of @@ -204,7 +206,9 @@ failure_reason = None for attempt in backoff: try: - response = request(url=url, method="GET", headers=headers_to_use) + response = request( + url=url, method="GET", headers=headers_to_use, timeout=timeout + ) if response.status in transport.DEFAULT_RETRYABLE_STATUS_CODES: _LOGGER.warning( "Compute Engine Metadata server unavailable on " diff --git a/google/auth/compute_engine/credentials.py b/google/auth/compute_engine/credentials.py index f0126c0a8..eb50d288b 100644 --- a/google/auth/compute_engine/credentials.py +++ b/google/auth/compute_engine/credentials.py @@ -124,8 +124,9 @@ def refresh(self, request): scopes = self._scopes if self._scopes is not None else self._default_scopes try: self._retrieve_info(request) + # Always fetch the token with the default service account email.
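+ # (The metadata server's "default" alias resolves to the service account + # the VM is configured with, so no lookup by explicit email is needed.)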
self.token, self.expiry = _metadata.get_service_account_token( - request, service_account=self._service_account_email, scopes=scopes + request, service_account="default", scopes=scopes ) except exceptions.TransportError as caught_exc: new_exc = exceptions.RefreshError(caught_exc) diff --git a/google/auth/identity_pool.py b/google/auth/identity_pool.py index 47f9a5571..c06f88428 100644 --- a/google/auth/identity_pool.py +++ b/google/auth/identity_pool.py @@ -41,6 +41,7 @@ except ImportError: # pragma: NO COVER from collections import Mapping # type: ignore import abc +import base64 import json import os from typing import NamedTuple @@ -145,9 +146,88 @@ def get_subject_token(self, context, request): class _X509Supplier(SubjectTokenSupplier): """Internal supplier for X509 workload credentials. This class is used internally and always returns an empty string as the subject token.""" + def __init__(self, trust_chain_path, leaf_cert_callback): + self._trust_chain_path = trust_chain_path + self._leaf_cert_callback = leaf_cert_callback + @_helpers.copy_docstring(SubjectTokenSupplier) def get_subject_token(self, context, request): - return "" + # Import OpenSSL inline because it is an extra import only required by customers + # using mTLS. + from OpenSSL import crypto + + leaf_cert = crypto.load_certificate( + crypto.FILETYPE_PEM, self._leaf_cert_callback() + ) + trust_chain = self._read_trust_chain() + cert_chain = [] + + cert_chain.append(_X509Supplier._encode_cert(leaf_cert)) + + if trust_chain is None or len(trust_chain) == 0: + return json.dumps(cert_chain) + + # Append the first cert if it is not the leaf cert. + first_cert = _X509Supplier._encode_cert(trust_chain[0]) + if first_cert != cert_chain[0]: + cert_chain.append(first_cert) + + for i in range(1, len(trust_chain)): + encoded = _X509Supplier._encode_cert(trust_chain[i]) + # Check if the current cert is the leaf cert and raise an exception if it is. + if encoded == cert_chain[0]: + raise exceptions.RefreshError( + "The leaf certificate must be at the top of the trust chain file" + ) + else: + cert_chain.append(encoded) + return json.dumps(cert_chain) + + def _read_trust_chain(self): + # Import OpenSSL inline because it is an extra import only required by customers + # using mTLS. + from OpenSSL import crypto + + certificate_trust_chain = [] + # If no trust chain path was provided, return an empty list. + if self._trust_chain_path is None or self._trust_chain_path == "": + return certificate_trust_chain + try: + # Open the trust chain file. + with open(self._trust_chain_path, "rb") as f: + trust_chain_data = f.read() + # Split PEM data into individual certificates. + cert_blocks = trust_chain_data.split(b"-----BEGIN CERTIFICATE-----") + for cert_block in cert_blocks: + # Skip empty blocks. + if cert_block.strip(): + cert_data = b"-----BEGIN CERTIFICATE-----" + cert_block + try: + # Load each certificate and add it to the trust chain. + cert = crypto.load_certificate( + crypto.FILETYPE_PEM, cert_data + ) + certificate_trust_chain.append(cert) + except Exception as e: + raise exceptions.RefreshError( + "Error loading PEM certificates from the trust chain file '{}'".format( + self._trust_chain_path + ) + ) from e + return certificate_trust_chain + except FileNotFoundError: + raise exceptions.RefreshError( + "Trust chain file '{}' was not found.".format(self._trust_chain_path) + ) + + def _encode_cert(cert): + # Import OpenSSL inline because it is an extra import only required by customers + # using mTLS.
+ from OpenSSL import crypto + + return base64.b64encode( + crypto.dump_certificate(crypto.FILETYPE_ASN1, cert) + ).decode("utf-8") def _parse_token_data(token_content, format_type="text", subject_token_field_name=None): @@ -296,7 +376,9 @@ def __init__( self._credential_source_headers, ) else: # self._credential_source_certificate - self._subject_token_supplier = _X509Supplier() + self._subject_token_supplier = _X509Supplier( + self._trust_chain_path, self._get_cert_bytes + ) @_helpers.copy_docstring(external_account.Credentials) def retrieve_subject_token(self, request): @@ -314,6 +396,10 @@ def _get_mtls_cert_and_key_paths(self): self._certificate_config_location ) + def _get_cert_bytes(self): + cert_path, _ = self._get_mtls_cert_and_key_paths() + return _mtls_helper._read_cert_file(cert_path) + def _mtls_required(self): return self._credential_source_certificate is not None @@ -350,6 +436,9 @@ def _validate_certificate_config(self): use_default = self._credential_source_certificate.get( "use_default_certificate_config" ) + self._trust_chain_path = self._credential_source_certificate.get( + "trust_chain_path" + ) if self._certificate_config_location and use_default: raise exceptions.MalformedError( "Invalid certificate configuration, certificate_config_location cannot be specified when use_default_certificate_config = true." diff --git a/google/auth/impersonated_credentials.py b/google/auth/impersonated_credentials.py index ed7e3f00b..d49998cfb 100644 --- a/google/auth/impersonated_credentials.py +++ b/google/auth/impersonated_credentials.py @@ -47,6 +47,12 @@ _GOOGLE_OAUTH2_TOKEN_ENDPOINT = "https://oauth2.googleapis.com/token" +_SOURCE_CREDENTIAL_AUTHORIZED_USER_TYPE = "authorized_user" +_SOURCE_CREDENTIAL_SERVICE_ACCOUNT_TYPE = "service_account" +_SOURCE_CREDENTIAL_EXTERNAL_ACCOUNT_AUTHORIZED_USER_TYPE = ( + "external_account_authorized_user" +) + def _make_iam_token_request( request, @@ -410,6 +416,75 @@ def with_scopes(self, scopes, default_scopes=None): cred._target_scopes = scopes or default_scopes return cred + @classmethod + def from_impersonated_service_account_info(cls, info, scopes=None): + """Creates a Credentials instance from parsed impersonated service account credentials info. + + Args: + info (Mapping[str, str]): The impersonated service account credentials info in Google + format. + scopes (Sequence[str]): Optional list of scopes to include in the + credentials. + + Returns: + google.auth.impersonated_credentials.Credentials: The constructed + credentials. + + Raises: + InvalidType: If the info["source_credentials"] is not a supported impersonation type. + InvalidValue: If the info["service_account_impersonation_url"] is not in the expected format. + ValueError: If the info is not in the expected format.
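+ + Example (an illustrative sketch; the file layout mirrors the JSON that + gcloud writes for an impersonated service account): + + import json + + with open("impersonated_service_account.json") as f: + info = json.load(f) + credentials = Credentials.from_impersonated_service_account_info( + info, scopes=["https://www.googleapis.com/auth/cloud-platform"] + )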
+ """ + + source_credentials_info = info.get("source_credentials") + source_credentials_type = source_credentials_info.get("type") + if source_credentials_type == _SOURCE_CREDENTIAL_AUTHORIZED_USER_TYPE: + from google.oauth2 import credentials + + source_credentials = credentials.Credentials.from_authorized_user_info( + source_credentials_info + ) + elif source_credentials_type == _SOURCE_CREDENTIAL_SERVICE_ACCOUNT_TYPE: + from google.oauth2 import service_account + + source_credentials = service_account.Credentials.from_service_account_info( + source_credentials_info + ) + elif ( + source_credentials_type + == _SOURCE_CREDENTIAL_EXTERNAL_ACCOUNT_AUTHORIZED_USER_TYPE + ): + from google.auth import external_account_authorized_user + + source_credentials = external_account_authorized_user.Credentials.from_info( + source_credentials_info + ) + else: + raise exceptions.InvalidType( + "source credential of type {} is not supported.".format( + source_credentials_type + ) + ) + + impersonation_url = info.get("service_account_impersonation_url") + start_index = impersonation_url.rfind("/") + end_index = impersonation_url.find(":generateAccessToken") + if start_index == -1 or end_index == -1 or start_index > end_index: + raise exceptions.InvalidValue( + "Cannot extract target principal from {}".format(impersonation_url) + ) + target_principal = impersonation_url[start_index + 1 : end_index] + delegates = info.get("delegates") + quota_project_id = info.get("quota_project_id") + + return cls( + source_credentials, + target_principal, + scopes, + delegates, + quota_project_id=quota_project_id, + ) + class IDTokenCredentials(credentials.CredentialsWithQuotaProject): """Open ID Connect ID Token-based service account credentials. diff --git a/google/auth/transport/__init__.py b/google/auth/transport/__init__.py index 724568e58..457576350 100644 --- a/google/auth/transport/__init__.py +++ b/google/auth/transport/__init__.py @@ -30,6 +30,7 @@ DEFAULT_RETRYABLE_STATUS_CODES = ( http_client.INTERNAL_SERVER_ERROR, http_client.SERVICE_UNAVAILABLE, + http_client.GATEWAY_TIMEOUT, http_client.REQUEST_TIMEOUT, http_client.TOO_MANY_REQUESTS, ) diff --git a/google/auth/transport/_aiohttp_requests.py b/google/auth/transport/_aiohttp_requests.py index bc4d9dc69..36366be51 100644 --- a/google/auth/transport/_aiohttp_requests.py +++ b/google/auth/transport/_aiohttp_requests.py @@ -22,14 +22,20 @@ import asyncio import functools +import logging import aiohttp # type: ignore import urllib3 # type: ignore +from google.auth import _helpers from google.auth import exceptions from google.auth import transport +from google.auth.aio import _helpers as _helpers_async from google.auth.transport import requests + +_LOGGER = logging.getLogger(__name__) + # Timeout can be re-defined depending on async requirement. Currently made 60s more than # sync timeout. 
_DEFAULT_TIMEOUT = 180 # in seconds @@ -182,10 +188,11 @@ async def __call__( self.session = aiohttp.ClientSession( auto_decompress=False ) # pragma: NO COVER - requests._LOGGER.debug("Making request: %s %s", method, url) + _helpers.request_log(_LOGGER, method, url, body, headers) response = await self.session.request( method, url, data=body, headers=headers, timeout=timeout, **kwargs ) + await _helpers_async.response_log_async(_LOGGER, response) return _CombinedResponse(response) except aiohttp.ClientError as caught_exc: diff --git a/google/auth/transport/_http_client.py b/google/auth/transport/_http_client.py index cec0ab73f..898a86519 100644 --- a/google/auth/transport/_http_client.py +++ b/google/auth/transport/_http_client.py @@ -19,6 +19,7 @@ import socket import urllib +from google.auth import _helpers from google.auth import exceptions from google.auth import transport @@ -99,10 +100,11 @@ def __call__( connection = http_client.HTTPConnection(parts.netloc, timeout=timeout) try: - _LOGGER.debug("Making request: %s %s", method, url) + _helpers.request_log(_LOGGER, method, url, body, headers) connection.request(method, path, body=body, headers=headers, **kwargs) response = connection.getresponse() + _helpers.response_log(_LOGGER, response) return Response(response) except (http_client.HTTPException, socket.error) as caught_exc: diff --git a/google/auth/transport/requests.py b/google/auth/transport/requests.py index 23a69783d..2753912c6 100644 --- a/google/auth/transport/requests.py +++ b/google/auth/transport/requests.py @@ -34,6 +34,7 @@ create_urllib3_context, ) # pylint: disable=ungrouped-imports +from google.auth import _helpers from google.auth import environment_vars from google.auth import exceptions from google.auth import transport @@ -182,10 +183,11 @@ def __call__( google.auth.exceptions.TransportError: If any exception occurred. """ try: - _LOGGER.debug("Making request: %s %s", method, url) + _helpers.request_log(_LOGGER, method, url, body, headers) response = self.session.request( method, url, data=body, headers=headers, timeout=timeout, **kwargs ) + _helpers.response_log(_LOGGER, response) return _Response(response) except requests.exceptions.RequestException as caught_exc: new_exc = exceptions.TransportError(caught_exc) @@ -534,6 +536,7 @@ def request( remaining_time = guard.remaining_timeout with TimeoutGuard(remaining_time) as guard: + _helpers.request_log(_LOGGER, method, url, data, headers) response = super(AuthorizedSession, self).request( method, url, diff --git a/google/auth/transport/urllib3.py b/google/auth/transport/urllib3.py index 63144f5ff..03ed75aa2 100644 --- a/google/auth/transport/urllib3.py +++ b/google/auth/transport/urllib3.py @@ -34,14 +34,23 @@ try: import urllib3 # type: ignore import urllib3.exceptions # type: ignore + from packaging import version # type: ignore except ImportError as caught_exc: # pragma: NO COVER raise ImportError( - "The urllib3 library is not installed from please install the " - "urllib3 package to use the urllib3 transport." + "" + f"Error: {caught_exc}." + " The 'google-auth' library requires the extras installed " + "for urllib3 network transport." + "\n" + "Please install the necessary dependencies using pip:\n" + " pip install google-auth[urllib3]\n" + "\n" + "(Note: Using '[urllib3]' ensures the specific dependencies needed for this feature are installed. 
" + "We recommend running this command in your virtual environment.)" ) from caught_exc -from packaging import version # type: ignore +from google.auth import _helpers from google.auth import environment_vars from google.auth import exceptions from google.auth import transport @@ -136,10 +145,11 @@ def __call__( kwargs["timeout"] = timeout try: - _LOGGER.debug("Making request: %s %s", method, url) + _helpers.request_log(_LOGGER, method, url, body, headers) response = self.http.request( method, url, body=body, headers=headers, **kwargs ) + _helpers.response_log(_LOGGER, response) return _Response(response) except urllib3.exceptions.HTTPError as caught_exc: new_exc = exceptions.TransportError(caught_exc) @@ -414,7 +424,7 @@ def urlopen(self, method, url, body=None, headers=None, **kwargs): body=body, headers=headers, _credential_refresh_attempt=_credential_refresh_attempt + 1, - **kwargs + **kwargs, ) return response diff --git a/google/auth/version.py b/google/auth/version.py index 41a80e6c6..318bf723b 100644 --- a/google/auth/version.py +++ b/google/auth/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.38.0" +__version__ = "2.40.3" diff --git a/google/oauth2/id_token.py b/google/oauth2/id_token.py index b68ab6b30..a6c51ce63 100644 --- a/google/oauth2/id_token.py +++ b/google/oauth2/id_token.py @@ -284,6 +284,18 @@ def fetch_id_token_credentials(audience, request=None): return service_account.IDTokenCredentials.from_service_account_info( info, target_audience=audience ) + elif info.get("type") == "impersonated_service_account": + from google.auth import impersonated_credentials + + target_credentials = impersonated_credentials.Credentials.from_impersonated_service_account_info( + info + ) + + return impersonated_credentials.IDTokenCredentials( + target_credentials=target_credentials, + target_audience=audience, + include_email=True, + ) except ValueError as caught_exc: new_exc = exceptions.DefaultCredentialsError( "GOOGLE_APPLICATION_CREDENTIALS is not valid service account credentials.", diff --git a/google/oauth2/webauthn_types.py b/google/oauth2/webauthn_types.py index 7784e83d0..24e984f3d 100644 --- a/google/oauth2/webauthn_types.py +++ b/google/oauth2/webauthn_types.py @@ -67,7 +67,7 @@ class GetRequest: extensions: Optional[AuthenticationExtensionsClientInputs] = None def to_json(self) -> str: - req_options: Dict[str, Any] = {"rpid": self.rpid, "challenge": self.challenge} + req_options: Dict[str, Any] = {"rpId": self.rpid, "challenge": self.challenge} if self.timeout_ms: req_options["timeout"] = self.timeout_ms if self.allow_credentials: diff --git a/noxfile.py b/noxfile.py index 07cef9bcc..1071be0ad 100644 --- a/noxfile.py +++ b/noxfile.py @@ -84,13 +84,12 @@ def mypy(session): session.run("mypy", "-p", "google", "-p", "tests", "-p", "tests_async") -@nox.session(python=["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"]) +@nox.session(python=["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"]) def unit(session): constraints_path = str( CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" ) - session.install("-r", "testing/requirements.txt", "-c", constraints_path) - session.install("-e", ".", "-c", constraints_path) + session.install("-e", ".[testing]", "-c", constraints_path) session.run( "pytest", f"--junitxml=unit_{session.python}_sponge_log.xml", @@ -105,8 +104,7 @@ def unit(session): @nox.session(python="3.8") def cover(session): - session.install("-r", 
"testing/requirements.txt") - session.install("-e", ".") + session.install("-e", ".[testing]") session.run( "pytest", "--cov=google.auth", @@ -144,8 +142,7 @@ def docs(session): @nox.session(python="pypy") def pypy(session): - session.install("-r", "testing/requirements.txt") - session.install("-e", ".") + session.install("-e", ".[testing]") session.run( "pytest", f"--junitxml=unit_{session.python}_sponge_log.xml", diff --git a/renovate.json b/renovate.json index 39b2a0ec9..c7875c469 100644 --- a/renovate.json +++ b/renovate.json @@ -5,7 +5,7 @@ ":preserveSemverRanges", ":disableDependencyDashboard" ], - "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py"], + "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py", ".github/workflows/unittest.yml"], "pip_requirements": { "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"] } diff --git a/samples/cloud-client/snippets/noxfile.py b/samples/cloud-client/snippets/noxfile.py index f5d827941..c21466d4f 100644 --- a/samples/cloud-client/snippets/noxfile.py +++ b/samples/cloud-client/snippets/noxfile.py @@ -60,7 +60,7 @@ ] -@nox.session(python=["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"]) +@nox.session(python=["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"]) def unit(session): # constraints_path = str( # CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" diff --git a/samples/cloud-client/snippets/requirements.txt b/samples/cloud-client/snippets/requirements.txt index d661b7fc9..416c56b94 100644 --- a/samples/cloud-client/snippets/requirements.txt +++ b/samples/cloud-client/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-compute==1.5.1 -google-cloud-storage==2.5.0 -google-auth==2.11.0 -pytest==7.1.2 \ No newline at end of file +google-cloud-storage==3.1.0 +google-auth==2.38.0 +pytest==7.1.2 diff --git a/setup.py b/setup.py index b5c7e627c..3874354fd 100644 --- a/setup.py +++ b/setup.py @@ -27,13 +27,70 @@ "rsa>=3.1.4,<5", ) +# TODO(https://github.com/googleapis/google-auth-library-python/issues/1737): Unit test fails with +# `No module named 'cryptography.hazmat.backends.openssl.x509' for Python 3.7``. +cryptography_base_require = [ + "cryptography >= 38.0.3", + "cryptography < 39.0.0; python_version < '3.8'", +] + +requests_extra_require = ["requests >= 2.20.0, < 3.0.0"] + +aiohttp_extra_require = ["aiohttp >= 3.6.2, < 4.0.0", *requests_extra_require] + +pyjwt_extra_require = ["pyjwt>=2.0", *cryptography_base_require] + +reauth_extra_require = ["pyu2f>=0.1.5"] + +# TODO(https://github.com/googleapis/google-auth-library-python/issues/1738): Add bounds for cryptography and pyopenssl dependencies. +enterprise_cert_extra_require = ["cryptography", "pyopenssl"] + +pyopenssl_extra_require = ["pyopenssl>=20.0.0", cryptography_base_require] + +# TODO(https://github.com/googleapis/google-auth-library-python/issues/1739): Add bounds for urllib3 and packaging dependencies. +urllib3_extra_require = ["urllib3", "packaging"] + +# Unit test requirements. +testing_extra_require = [ + # TODO(https://github.com/googleapis/google-auth-library-python/issues/1735): Remove `grpcio` from testing requirements once an extra is added for `grpcio` dependency. + "grpcio", + "flask", + "freezegun", + "mock", + # TODO(https://github.com/googleapis/google-auth-library-python/issues/1736): Remove `oauth2client` from testing requirements once an extra is added for `oauth2client` dependency. 
+ "oauth2client", + *pyjwt_extra_require, + "pytest", + "pytest-cov", + "pytest-localserver", + *pyopenssl_extra_require, + *reauth_extra_require, + "responses", + *urllib3_extra_require, + # Async Dependencies + *aiohttp_extra_require, + "aioresponses", + "pytest-asyncio", + # TODO(https://github.com/googleapis/google-auth-library-python/issues/1665): Remove the pinned version of pyopenssl + # once `TestDecryptPrivateKey::test_success` is updated to remove the deprecated `OpenSSL.crypto.sign` and + # `OpenSSL.crypto.verify` methods. See: https://www.pyopenssl.org/en/latest/changelog.html#id3. + "pyopenssl < 24.3.0", + # TODO(https://github.com/googleapis/google-auth-library-python/issues/1722): `test_aiohttp_requests` depend on + # aiohttp < 3.10.0 which is a bug. Investigate and remove the pinned aiohttp version. + "aiohttp < 3.10.0", +] + extras = { - "aiohttp": ["aiohttp >= 3.6.2, < 4.0.0.dev0", "requests >= 2.20.0, < 3.0.0.dev0"], - "pyopenssl": ["pyopenssl>=20.0.0", "cryptography>=38.0.3"], - "requests": "requests >= 2.20.0, < 3.0.0.dev0", - "reauth": "pyu2f>=0.1.5", - "enterprise_cert": ["cryptography", "pyopenssl"], - "pyjwt": ["pyjwt>=2.0", "cryptography>=38.0.3"], + "aiohttp": aiohttp_extra_require, + "enterprise_cert": enterprise_cert_extra_require, + "pyopenssl": pyopenssl_extra_require, + "pyjwt": pyjwt_extra_require, + "reauth": reauth_extra_require, + "requests": requests_extra_require, + "testing": testing_extra_require, + "urllib3": urllib3_extra_require, + # TODO(https://github.com/googleapis/google-auth-library-python/issues/1735): Add an extra for `grpcio` dependency. + # TODO(https://github.com/googleapis/google-auth-library-python/issues/1736): Add an extra for `oauth2client` dependency. } with io.open("README.rst", "r") as fh: @@ -71,6 +128,7 @@ "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", "Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: Apache Software License", diff --git a/system_tests/secrets.tar.enc b/system_tests/secrets.tar.enc index bcc3f042d..4e56a6404 100644 Binary files a/system_tests/secrets.tar.enc and b/system_tests/secrets.tar.enc differ diff --git a/testing/constraints-3.13.txt b/testing/constraints-3.13.txt new file mode 100644 index 000000000..e69de29bb diff --git a/testing/requirements.txt b/testing/requirements.txt deleted file mode 100644 index 742c4a46d..000000000 --- a/testing/requirements.txt +++ /dev/null @@ -1,21 +0,0 @@ -# Unit test requirements -flask -freezegun -mock -oauth2client -pyopenssl -pytest -pytest-cov -pytest-localserver -pyu2f -pyjwt -requests -urllib3 -cryptography < 39.0.0 -responses -grpcio -# Async Dependencies -pytest-asyncio; python_version > '3.0' -aioresponses; python_version > '3.0' -asynctest; python_version > '3.0' -aiohttp < 3.10.0; python_version > '3.0' diff --git a/tests/aio/test__helpers.py b/tests/aio/test__helpers.py new file mode 100644 index 000000000..829dc97ff --- /dev/null +++ b/tests/aio/test__helpers.py @@ -0,0 +1,106 @@ +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import json +import logging + +import pytest # type: ignore + +from google.auth.aio import _helpers + +# _MOCK_BASE_LOGGER_NAME is the base logger namespace used for testing. +_MOCK_BASE_LOGGER_NAME = "foogle" + +# _MOCK_CHILD_LOGGER_NAME is the child logger namespace used for testing. +_MOCK_CHILD_LOGGER_NAME = "foogle.bar" + + +@pytest.fixture +def logger(): + """Returns a child logger for testing.""" + logger = logging.getLogger(_MOCK_CHILD_LOGGER_NAME) + logger.level = logging.NOTSET + logger.handlers = [] + logger.propagate = True + return logger + + +@pytest.fixture +def base_logger(): + """Returns a base logger for testing.""" + logger = logging.getLogger(_MOCK_BASE_LOGGER_NAME) + logger.level = logging.NOTSET + logger.handlers = [] + logger.propagate = True + return logger + + +@pytest.mark.asyncio +async def test_response_log_debug_enabled(logger, caplog, base_logger): + caplog.set_level(logging.DEBUG, logger=_MOCK_CHILD_LOGGER_NAME) + await _helpers.response_log_async(logger, {"payload": None}) + assert len(caplog.records) == 1 + record = caplog.records[0] + assert record.message == "Response received..." + assert record.httpResponse == "<class 'NoneType'>" + + +@pytest.mark.asyncio +async def test_response_log_debug_disabled(logger, caplog, base_logger): + caplog.set_level(logging.INFO, logger=_MOCK_CHILD_LOGGER_NAME) + await _helpers.response_log_async(logger, "another_response") + assert "Response received..." not in caplog.text + + +@pytest.mark.asyncio +async def test_response_log_debug_enabled_response_json(logger, caplog, base_logger): + response = None + caplog.set_level(logging.DEBUG, logger=_MOCK_CHILD_LOGGER_NAME) + await _helpers.response_log_async(logger, response) + assert len(caplog.records) == 1 + record = caplog.records[0] + assert record.message == "Response received..."
+ assert record.httpResponse == "<class 'NoneType'>" + + +@pytest.mark.asyncio +async def test_parse_response_async_json_valid(): + class MockResponse: + async def json(self): + return {"data": "test"} + + response = MockResponse() + expected = {"data": "test"} + assert await _helpers._parse_response_async(response) == expected + + +@pytest.mark.asyncio +async def test_parse_response_async_json_invalid(): + class MockResponse: + def json(self): + raise json.JSONDecodeError("msg", "doc", 0) + + response = MockResponse() + assert await _helpers._parse_response_async(response) is None + + +@pytest.mark.asyncio +async def test_parse_response_async_no_json_method(): + response = "plain text" + assert await _helpers._parse_response_async(response) is None + + +@pytest.mark.asyncio +async def test_parse_response_async_none(): + assert await _helpers._parse_response_async(None) is None diff --git a/tests/compute_engine/test__metadata.py b/tests/compute_engine/test__metadata.py index 7c028eb62..c90bc603a 100644 --- a/tests/compute_engine/test__metadata.py +++ b/tests/compute_engine/test__metadata.py @@ -174,6 +174,7 @@ def test_get_success_json(): method="GET", url=_metadata._METADATA_ROOT + PATH, headers=_metadata._METADATA_HEADERS, + timeout=_metadata._METADATA_DEFAULT_TIMEOUT, ) assert result[key] == value @@ -192,6 +193,7 @@ def test_get_success_json_content_type_charset(): method="GET", url=_metadata._METADATA_ROOT + PATH, headers=_metadata._METADATA_HEADERS, + timeout=_metadata._METADATA_DEFAULT_TIMEOUT, ) assert result[key] == value @@ -211,6 +213,7 @@ def test_get_success_retry(mock_sleep): method="GET", url=_metadata._METADATA_ROOT + PATH, headers=_metadata._METADATA_HEADERS, + timeout=_metadata._METADATA_DEFAULT_TIMEOUT, ) assert request.call_count == 2 assert result[key] == value @@ -226,6 +229,7 @@ def test_get_success_text(): method="GET", url=_metadata._METADATA_ROOT + PATH, headers=_metadata._METADATA_HEADERS, + timeout=_metadata._METADATA_DEFAULT_TIMEOUT, ) assert result == data @@ -241,6 +245,7 @@ def test_get_success_params(): method="GET", url=_metadata._METADATA_ROOT + PATH + "?recursive=true", headers=_metadata._METADATA_HEADERS, + timeout=_metadata._METADATA_DEFAULT_TIMEOUT, ) assert result == data @@ -255,6 +260,7 @@ def test_get_success_recursive_and_params(): method="GET", url=_metadata._METADATA_ROOT + PATH + "?recursive=true", headers=_metadata._METADATA_HEADERS, + timeout=_metadata._METADATA_DEFAULT_TIMEOUT, ) assert result == data @@ -269,6 +275,7 @@ def test_get_success_recursive(): method="GET", url=_metadata._METADATA_ROOT + PATH + "?recursive=true", headers=_metadata._METADATA_HEADERS, + timeout=_metadata._METADATA_DEFAULT_TIMEOUT, ) assert result == data @@ -290,6 +297,7 @@ def test_get_success_custom_root_new_variable(): method="GET", url="http://{}/computeMetadata/v1/{}".format(fake_root, PATH), headers=_metadata._METADATA_HEADERS, + timeout=_metadata._METADATA_DEFAULT_TIMEOUT, ) @@ -310,6 +318,7 @@ def test_get_success_custom_root_old_variable(): method="GET", url="http://{}/computeMetadata/v1/{}".format(fake_root, PATH), headers=_metadata._METADATA_HEADERS, + timeout=_metadata._METADATA_DEFAULT_TIMEOUT, ) @@ -326,6 +335,7 @@ def test_get_failure(mock_sleep): method="GET", url=_metadata._METADATA_ROOT + PATH, headers=_metadata._METADATA_HEADERS, + timeout=_metadata._METADATA_DEFAULT_TIMEOUT, ) @@ -338,6 +348,7 @@ def test_get_return_none_for_not_found_error(): method="GET", url=_metadata._METADATA_ROOT + PATH, headers=_metadata._METADATA_HEADERS, +
timeout=_metadata._METADATA_DEFAULT_TIMEOUT, ) @@ -357,6 +368,7 @@ def test_get_failure_connection_failed(mock_sleep): method="GET", url=_metadata._METADATA_ROOT + PATH, headers=_metadata._METADATA_HEADERS, + timeout=_metadata._METADATA_DEFAULT_TIMEOUT, ) assert request.call_count == 5 @@ -375,6 +387,7 @@ def test_get_too_many_requests_retryable_error_failure(): method="GET", url=_metadata._METADATA_ROOT + PATH, headers=_metadata._METADATA_HEADERS, + timeout=_metadata._METADATA_DEFAULT_TIMEOUT, ) assert request.call_count == 5 @@ -391,6 +404,7 @@ def test_get_failure_bad_json(): method="GET", url=_metadata._METADATA_ROOT + PATH, headers=_metadata._METADATA_HEADERS, + timeout=_metadata._METADATA_DEFAULT_TIMEOUT, ) @@ -404,6 +418,7 @@ def test_get_project_id(): method="GET", url=_metadata._METADATA_ROOT + "project/project-id", headers=_metadata._METADATA_HEADERS, + timeout=_metadata._METADATA_DEFAULT_TIMEOUT, ) assert project_id == project @@ -419,6 +434,7 @@ def test_get_universe_domain_success(): method="GET", url=_metadata._METADATA_ROOT + "universe/universe-domain", headers=_metadata._METADATA_HEADERS, + timeout=_metadata._METADATA_DEFAULT_TIMEOUT, ) assert universe_domain == "fake_universe_domain" @@ -432,6 +448,7 @@ def test_get_universe_domain_success_empty_response(): method="GET", url=_metadata._METADATA_ROOT + "universe/universe-domain", headers=_metadata._METADATA_HEADERS, + timeout=_metadata._METADATA_DEFAULT_TIMEOUT, ) assert universe_domain == "googleapis.com" @@ -447,6 +464,7 @@ def test_get_universe_domain_not_found(): method="GET", url=_metadata._METADATA_ROOT + "universe/universe-domain", headers=_metadata._METADATA_HEADERS, + timeout=_metadata._METADATA_DEFAULT_TIMEOUT, ) assert universe_domain == "googleapis.com" @@ -467,6 +485,7 @@ def test_get_universe_domain_retryable_error_failure(): method="GET", url=_metadata._METADATA_ROOT + "universe/universe-domain", headers=_metadata._METADATA_HEADERS, + timeout=_metadata._METADATA_DEFAULT_TIMEOUT, ) assert request.call_count == 5 @@ -509,11 +528,13 @@ def request(self, *args, **kwargs): method="GET", url=_metadata._METADATA_ROOT + "universe/universe-domain", headers=_metadata._METADATA_HEADERS, + timeout=_metadata._METADATA_DEFAULT_TIMEOUT, ) request_ok.assert_called_once_with( method="GET", url=_metadata._METADATA_ROOT + "universe/universe-domain", headers=_metadata._METADATA_HEADERS, + timeout=_metadata._METADATA_DEFAULT_TIMEOUT, ) assert universe_domain == "fake_universe_domain" @@ -533,6 +554,7 @@ def test_get_universe_domain_other_error(): method="GET", url=_metadata._METADATA_ROOT + "universe/universe-domain", headers=_metadata._METADATA_HEADERS, + timeout=_metadata._METADATA_DEFAULT_TIMEOUT, ) @@ -557,6 +579,7 @@ def test_get_service_account_token(utcnow, mock_metrics_header_value): "metadata-flavor": "Google", "x-goog-api-client": ACCESS_TOKEN_REQUEST_METRICS_HEADER_VALUE, }, + timeout=_metadata._METADATA_DEFAULT_TIMEOUT, ) assert token == "token" assert expiry == utcnow() + datetime.timedelta(seconds=ttl) @@ -583,6 +606,7 @@ def test_get_service_account_token_with_scopes_list(utcnow, mock_metrics_header_ "metadata-flavor": "Google", "x-goog-api-client": ACCESS_TOKEN_REQUEST_METRICS_HEADER_VALUE, }, + timeout=_metadata._METADATA_DEFAULT_TIMEOUT, ) assert token == "token" assert expiry == utcnow() + datetime.timedelta(seconds=ttl) @@ -611,6 +635,7 @@ def test_get_service_account_token_with_scopes_string( "metadata-flavor": "Google", "x-goog-api-client": ACCESS_TOKEN_REQUEST_METRICS_HEADER_VALUE, }, + 
timeout=_metadata._METADATA_DEFAULT_TIMEOUT, ) assert token == "token" assert expiry == utcnow() + datetime.timedelta(seconds=ttl) @@ -628,6 +653,7 @@ def test_get_service_account_info(): method="GET", url=_metadata._METADATA_ROOT + PATH + "/?recursive=true", headers=_metadata._METADATA_HEADERS, + timeout=_metadata._METADATA_DEFAULT_TIMEOUT, ) assert info[key] == value diff --git a/tests/compute_engine/test_credentials.py b/tests/compute_engine/test_credentials.py index fddfb7f64..03fe845b1 100644 --- a/tests/compute_engine/test_credentials.py +++ b/tests/compute_engine/test_credentials.py @@ -501,7 +501,7 @@ def test_with_target_audience_integration(self): responses.add( responses.GET, "http://metadata.google.internal/computeMetadata/v1/instance/" - "service-accounts/service-account@example.com/token", + "service-accounts/default/token", status=200, content_type="application/json", json={ @@ -659,7 +659,7 @@ def test_with_quota_project_integration(self): responses.add( responses.GET, "http://metadata.google.internal/computeMetadata/v1/instance/" - "service-accounts/service-account@example.com/token", + "service-accounts/default/token", status=200, content_type="application/json", json={ diff --git a/tests/data/trust_chain_with_leaf.pem b/tests/data/trust_chain_with_leaf.pem new file mode 100644 index 000000000..250387d9d --- /dev/null +++ b/tests/data/trust_chain_with_leaf.pem @@ -0,0 +1,52 @@ +-----BEGIN CERTIFICATE----- +MIIDIzCCAgugAwIBAgIJAMfISuBQ5m+5MA0GCSqGSIb3DQEBBQUAMBUxEzARBgNV +BAMTCnVuaXQtdGVzdHMwHhcNMTExMjA2MTYyNjAyWhcNMjExMjAzMTYyNjAyWjAV +MRMwEQYDVQQDEwp1bml0LXRlc3RzMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB +CgKCAQEA4ej0p7bQ7L/r4rVGUz9RN4VQWoej1Bg1mYWIDYslvKrk1gpj7wZgkdmM +7oVK2OfgrSj/FCTkInKPqaCR0gD7K80q+mLBrN3PUkDrJQZpvRZIff3/xmVU1Wer +uQLFJjnFb2dqu0s/FY/2kWiJtBCakXvXEOb7zfbINuayL+MSsCGSdVYsSliS5qQp +gyDap+8b5fpXZVJkq92hrcNtbkg7hCYUJczt8n9hcCTJCfUpApvaFQ18pe+zpyl4 ++WzkP66I28hniMQyUlA1hBiskT7qiouq0m8IOodhv2fagSZKjOTTU2xkSBc//fy3 +ZpsL7WqgsZS7Q+0VRK8gKfqkxg5OYQIDAQABo3YwdDAdBgNVHQ4EFgQU2RQ8yO+O +gN8oVW2SW7RLrfYd9jEwRQYDVR0jBD4wPIAU2RQ8yO+OgN8oVW2SW7RLrfYd9jGh +GaQXMBUxEzARBgNVBAMTCnVuaXQtdGVzdHOCCQDHyErgUOZvuTAMBgNVHRMEBTAD +AQH/MA0GCSqGSIb3DQEBBQUAA4IBAQBRv+M/6+FiVu7KXNjFI5pSN17OcW5QUtPr +odJMlWrJBtynn/TA1oJlYu3yV5clc/71Vr/AxuX5xGP+IXL32YDF9lTUJXG/uUGk ++JETpKmQviPbRsvzYhz4pf6ZIOZMc3/GIcNq92ECbseGO+yAgyWUVKMmZM0HqXC9 +ovNslqe0M8C1sLm1zAR5z/h/litE7/8O2ietija3Q/qtl2TOXJdCA6sgjJX2WUql +ybrC55ct18NKf3qhpcEkGQvFU40rVYApJpi98DiZPYFdx1oBDp/f4uZ3ojpxRVFT +cDwcJLfNRCPUhormsY7fDS9xSyThiHsW9mjJYdcaKQkwYZ0F11yB +-----END CERTIFICATE----- +-----BEGIN CERTIFICATE----- +MIIFtTCCA52gAwIBAgIJAPBsLZmNGfKtMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNV +BAYTAkFVMRMwEQYDVQQIEwpTb21lLVN0YXRlMSEwHwYDVQQKExhJbnRlcm5ldCBX +aWRnaXRzIFB0eSBMdGQwHhcNMTYwOTIxMDI0NTEyWhcNMTYxMDIxMDI0NTEyWjBF +MQswCQYDVQQGEwJBVTETMBEGA1UECBMKU29tZS1TdGF0ZTEhMB8GA1UEChMYSW50 +ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC +CgKCAgEAsiMC7mTsmUXwZoYlT4aHY1FLw8bxIXC+z3IqA+TY1WqfbeiZRo8MA5Zx +lTTxYMKPCZUE1XBc7jvD8GJhWIj6pToPYHn73B01IBkLBxq4kF1yV2Z7DVmkvc6H +EcxXXq8zkCx0j6XOfiI4+qkXnuQn8cvrk8xfhtnMMZM7iVm6VSN93iRP/8ey6xuL +XTHrDX7ukoRce1hpT8O+15GXNrY0irhhYQz5xKibNCJF3EjV28WMry8y7I8uYUFU +RWDiQawwK9ec1zhZ94v92+GZDlPevmcFmSERKYQ0NsKcT0Y3lGuGnaExs8GyOpnC +oksu4YJGXQjg7lkv4MxzsNbRqmCkUwxw1Mg6FP0tsCNsw9qTrkvWCRA9zp/aU+sZ +IBGh1t4UGCub8joeQFvHxvr/3F7mH/dyvCjA34u0Lo1VPx+jYUIi9i0odltMspDW +xOpjqdGARZYmlJP5Au9q5cQjPMcwS/EBIb8cwNl32mUE6WnFlep+38mNR/FghIjO +ViAkXuKQmcHe6xppZAoHFsO/t3l4Tjek5vNW7erI1rgrFku/fvkIW/G8V1yIm/+Q 
+F+CE4maQzCJfhftpkhM/sPC/FuLNBmNE8BHVX8y58xG4is/cQxL4Z9TsFIw0C5+3 +uTrFW9D0agysahMVzPGtCqhDQqJdIJrBQqlS6bztpzBA8zEI0skCAwEAAaOBpzCB +pDAdBgNVHQ4EFgQUz/8FmW6TfqXyNJZr7rhc+Tn5sKQwdQYDVR0jBG4wbIAUz/8F +mW6TfqXyNJZr7rhc+Tn5sKShSaRHMEUxCzAJBgNVBAYTAkFVMRMwEQYDVQQIEwpT +b21lLVN0YXRlMSEwHwYDVQQKExhJbnRlcm5ldCBXaWRnaXRzIFB0eSBMdGSCCQDw +bC2ZjRnyrTAMBgNVHRMEBTADAQH/MA0GCSqGSIb3DQEBBQUAA4ICAQCQmrcfhurX +riR3Q0Y+nq040/3dJIAJXjyI9CEtxaU0nzCNTng7PwgZ0CKmCelQfInuwWFwBSHS +6kBfC1rgJeFnjnTt8a3RCgRlIgUr9NCdPSEccB7TurobwPJ2h6cJjjR8urcb0CXh +CEMvPneyPj0xUFY8vVKXMGWahz/kyfwIiVqcX/OtMZ29fUu1onbWl71g2gVLtUZl +sECdZ+AC/6HDCVpYIVETMl1T7N/XyqXZQiDLDNRDeZhnapz8w9fsW1KVujAZLNQR +pVnw2qa2UK1dSf2FHX+lQU5mFSYM4vtwaMlX/LgfdLZ9I796hFh619WwTVz+LO2N +vHnwBMabld3XSPuZRqlbBulDQ07Vbqdjv8DYSLA2aKI4ZkMMKuFLG/oS28V2ZYmv +/KpGEs5UgKY+P9NulYpTDwCU/6SomuQpP795wbG6sm7Hzq82r2RmB61GupNRGeqi +pXKsy69T388zBxYu6zQrosXiDl5YzaViH7tm0J7opye8dCWjjpnahki0vq2znti7 +6cWla2j8Xz1glvLz+JI/NCOMfxUInb82T7ijo80N0VJ2hzf7p2GxRZXAxAV9knLI +nM4F5TLjSd7ZhOOZ7ni/eZFueTMisWfypt2nc41whGjHMX/Zp1kPfhB4H2bLKIX/ +lSrwNr3qbGTEJX8JqpDBNVAd96XkMvDNyA== +-----END CERTIFICATE----- \ No newline at end of file diff --git a/tests/data/trust_chain_without_leaf.pem b/tests/data/trust_chain_without_leaf.pem new file mode 100644 index 000000000..9da0f37fe --- /dev/null +++ b/tests/data/trust_chain_without_leaf.pem @@ -0,0 +1,33 @@ +-----BEGIN CERTIFICATE----- +MIIFtTCCA52gAwIBAgIJAPBsLZmNGfKtMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNV +BAYTAkFVMRMwEQYDVQQIEwpTb21lLVN0YXRlMSEwHwYDVQQKExhJbnRlcm5ldCBX +aWRnaXRzIFB0eSBMdGQwHhcNMTYwOTIxMDI0NTEyWhcNMTYxMDIxMDI0NTEyWjBF +MQswCQYDVQQGEwJBVTETMBEGA1UECBMKU29tZS1TdGF0ZTEhMB8GA1UEChMYSW50 +ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC +CgKCAgEAsiMC7mTsmUXwZoYlT4aHY1FLw8bxIXC+z3IqA+TY1WqfbeiZRo8MA5Zx +lTTxYMKPCZUE1XBc7jvD8GJhWIj6pToPYHn73B01IBkLBxq4kF1yV2Z7DVmkvc6H +EcxXXq8zkCx0j6XOfiI4+qkXnuQn8cvrk8xfhtnMMZM7iVm6VSN93iRP/8ey6xuL +XTHrDX7ukoRce1hpT8O+15GXNrY0irhhYQz5xKibNCJF3EjV28WMry8y7I8uYUFU +RWDiQawwK9ec1zhZ94v92+GZDlPevmcFmSERKYQ0NsKcT0Y3lGuGnaExs8GyOpnC +oksu4YJGXQjg7lkv4MxzsNbRqmCkUwxw1Mg6FP0tsCNsw9qTrkvWCRA9zp/aU+sZ +IBGh1t4UGCub8joeQFvHxvr/3F7mH/dyvCjA34u0Lo1VPx+jYUIi9i0odltMspDW +xOpjqdGARZYmlJP5Au9q5cQjPMcwS/EBIb8cwNl32mUE6WnFlep+38mNR/FghIjO +ViAkXuKQmcHe6xppZAoHFsO/t3l4Tjek5vNW7erI1rgrFku/fvkIW/G8V1yIm/+Q +F+CE4maQzCJfhftpkhM/sPC/FuLNBmNE8BHVX8y58xG4is/cQxL4Z9TsFIw0C5+3 +uTrFW9D0agysahMVzPGtCqhDQqJdIJrBQqlS6bztpzBA8zEI0skCAwEAAaOBpzCB +pDAdBgNVHQ4EFgQUz/8FmW6TfqXyNJZr7rhc+Tn5sKQwdQYDVR0jBG4wbIAUz/8F +mW6TfqXyNJZr7rhc+Tn5sKShSaRHMEUxCzAJBgNVBAYTAkFVMRMwEQYDVQQIEwpT +b21lLVN0YXRlMSEwHwYDVQQKExhJbnRlcm5ldCBXaWRnaXRzIFB0eSBMdGSCCQDw +bC2ZjRnyrTAMBgNVHRMEBTADAQH/MA0GCSqGSIb3DQEBBQUAA4ICAQCQmrcfhurX +riR3Q0Y+nq040/3dJIAJXjyI9CEtxaU0nzCNTng7PwgZ0CKmCelQfInuwWFwBSHS +6kBfC1rgJeFnjnTt8a3RCgRlIgUr9NCdPSEccB7TurobwPJ2h6cJjjR8urcb0CXh +CEMvPneyPj0xUFY8vVKXMGWahz/kyfwIiVqcX/OtMZ29fUu1onbWl71g2gVLtUZl +sECdZ+AC/6HDCVpYIVETMl1T7N/XyqXZQiDLDNRDeZhnapz8w9fsW1KVujAZLNQR +pVnw2qa2UK1dSf2FHX+lQU5mFSYM4vtwaMlX/LgfdLZ9I796hFh619WwTVz+LO2N +vHnwBMabld3XSPuZRqlbBulDQ07Vbqdjv8DYSLA2aKI4ZkMMKuFLG/oS28V2ZYmv +/KpGEs5UgKY+P9NulYpTDwCU/6SomuQpP795wbG6sm7Hzq82r2RmB61GupNRGeqi +pXKsy69T388zBxYu6zQrosXiDl5YzaViH7tm0J7opye8dCWjjpnahki0vq2znti7 +6cWla2j8Xz1glvLz+JI/NCOMfxUInb82T7ijo80N0VJ2hzf7p2GxRZXAxAV9knLI +nM4F5TLjSd7ZhOOZ7ni/eZFueTMisWfypt2nc41whGjHMX/Zp1kPfhB4H2bLKIX/ +lSrwNr3qbGTEJX8JqpDBNVAd96XkMvDNyA== +-----END CERTIFICATE----- \ No newline at end of file diff --git a/tests/data/trust_chain_wrong_order.pem 
b/tests/data/trust_chain_wrong_order.pem new file mode 100644 index 000000000..e8dc5d359 --- /dev/null +++ b/tests/data/trust_chain_wrong_order.pem @@ -0,0 +1,52 @@ +-----BEGIN CERTIFICATE----- +MIIFtTCCA52gAwIBAgIJAPBsLZmNGfKtMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNV +BAYTAkFVMRMwEQYDVQQIEwpTb21lLVN0YXRlMSEwHwYDVQQKExhJbnRlcm5ldCBX +aWRnaXRzIFB0eSBMdGQwHhcNMTYwOTIxMDI0NTEyWhcNMTYxMDIxMDI0NTEyWjBF +MQswCQYDVQQGEwJBVTETMBEGA1UECBMKU29tZS1TdGF0ZTEhMB8GA1UEChMYSW50 +ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC +CgKCAgEAsiMC7mTsmUXwZoYlT4aHY1FLw8bxIXC+z3IqA+TY1WqfbeiZRo8MA5Zx +lTTxYMKPCZUE1XBc7jvD8GJhWIj6pToPYHn73B01IBkLBxq4kF1yV2Z7DVmkvc6H +EcxXXq8zkCx0j6XOfiI4+qkXnuQn8cvrk8xfhtnMMZM7iVm6VSN93iRP/8ey6xuL +XTHrDX7ukoRce1hpT8O+15GXNrY0irhhYQz5xKibNCJF3EjV28WMry8y7I8uYUFU +RWDiQawwK9ec1zhZ94v92+GZDlPevmcFmSERKYQ0NsKcT0Y3lGuGnaExs8GyOpnC +oksu4YJGXQjg7lkv4MxzsNbRqmCkUwxw1Mg6FP0tsCNsw9qTrkvWCRA9zp/aU+sZ +IBGh1t4UGCub8joeQFvHxvr/3F7mH/dyvCjA34u0Lo1VPx+jYUIi9i0odltMspDW +xOpjqdGARZYmlJP5Au9q5cQjPMcwS/EBIb8cwNl32mUE6WnFlep+38mNR/FghIjO +ViAkXuKQmcHe6xppZAoHFsO/t3l4Tjek5vNW7erI1rgrFku/fvkIW/G8V1yIm/+Q +F+CE4maQzCJfhftpkhM/sPC/FuLNBmNE8BHVX8y58xG4is/cQxL4Z9TsFIw0C5+3 +uTrFW9D0agysahMVzPGtCqhDQqJdIJrBQqlS6bztpzBA8zEI0skCAwEAAaOBpzCB +pDAdBgNVHQ4EFgQUz/8FmW6TfqXyNJZr7rhc+Tn5sKQwdQYDVR0jBG4wbIAUz/8F +mW6TfqXyNJZr7rhc+Tn5sKShSaRHMEUxCzAJBgNVBAYTAkFVMRMwEQYDVQQIEwpT +b21lLVN0YXRlMSEwHwYDVQQKExhJbnRlcm5ldCBXaWRnaXRzIFB0eSBMdGSCCQDw +bC2ZjRnyrTAMBgNVHRMEBTADAQH/MA0GCSqGSIb3DQEBBQUAA4ICAQCQmrcfhurX +riR3Q0Y+nq040/3dJIAJXjyI9CEtxaU0nzCNTng7PwgZ0CKmCelQfInuwWFwBSHS +6kBfC1rgJeFnjnTt8a3RCgRlIgUr9NCdPSEccB7TurobwPJ2h6cJjjR8urcb0CXh +CEMvPneyPj0xUFY8vVKXMGWahz/kyfwIiVqcX/OtMZ29fUu1onbWl71g2gVLtUZl +sECdZ+AC/6HDCVpYIVETMl1T7N/XyqXZQiDLDNRDeZhnapz8w9fsW1KVujAZLNQR +pVnw2qa2UK1dSf2FHX+lQU5mFSYM4vtwaMlX/LgfdLZ9I796hFh619WwTVz+LO2N +vHnwBMabld3XSPuZRqlbBulDQ07Vbqdjv8DYSLA2aKI4ZkMMKuFLG/oS28V2ZYmv +/KpGEs5UgKY+P9NulYpTDwCU/6SomuQpP795wbG6sm7Hzq82r2RmB61GupNRGeqi +pXKsy69T388zBxYu6zQrosXiDl5YzaViH7tm0J7opye8dCWjjpnahki0vq2znti7 +6cWla2j8Xz1glvLz+JI/NCOMfxUInb82T7ijo80N0VJ2hzf7p2GxRZXAxAV9knLI +nM4F5TLjSd7ZhOOZ7ni/eZFueTMisWfypt2nc41whGjHMX/Zp1kPfhB4H2bLKIX/ +lSrwNr3qbGTEJX8JqpDBNVAd96XkMvDNyA== +-----END CERTIFICATE----- +-----BEGIN CERTIFICATE----- +MIIDIzCCAgugAwIBAgIJAMfISuBQ5m+5MA0GCSqGSIb3DQEBBQUAMBUxEzARBgNV +BAMTCnVuaXQtdGVzdHMwHhcNMTExMjA2MTYyNjAyWhcNMjExMjAzMTYyNjAyWjAV +MRMwEQYDVQQDEwp1bml0LXRlc3RzMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB +CgKCAQEA4ej0p7bQ7L/r4rVGUz9RN4VQWoej1Bg1mYWIDYslvKrk1gpj7wZgkdmM +7oVK2OfgrSj/FCTkInKPqaCR0gD7K80q+mLBrN3PUkDrJQZpvRZIff3/xmVU1Wer +uQLFJjnFb2dqu0s/FY/2kWiJtBCakXvXEOb7zfbINuayL+MSsCGSdVYsSliS5qQp +gyDap+8b5fpXZVJkq92hrcNtbkg7hCYUJczt8n9hcCTJCfUpApvaFQ18pe+zpyl4 ++WzkP66I28hniMQyUlA1hBiskT7qiouq0m8IOodhv2fagSZKjOTTU2xkSBc//fy3 +ZpsL7WqgsZS7Q+0VRK8gKfqkxg5OYQIDAQABo3YwdDAdBgNVHQ4EFgQU2RQ8yO+O +gN8oVW2SW7RLrfYd9jEwRQYDVR0jBD4wPIAU2RQ8yO+OgN8oVW2SW7RLrfYd9jGh +GaQXMBUxEzARBgNVBAMTCnVuaXQtdGVzdHOCCQDHyErgUOZvuTAMBgNVHRMEBTAD +AQH/MA0GCSqGSIb3DQEBBQUAA4IBAQBRv+M/6+FiVu7KXNjFI5pSN17OcW5QUtPr +odJMlWrJBtynn/TA1oJlYu3yV5clc/71Vr/AxuX5xGP+IXL32YDF9lTUJXG/uUGk ++JETpKmQviPbRsvzYhz4pf6ZIOZMc3/GIcNq92ECbseGO+yAgyWUVKMmZM0HqXC9 +ovNslqe0M8C1sLm1zAR5z/h/litE7/8O2ietija3Q/qtl2TOXJdCA6sgjJX2WUql +ybrC55ct18NKf3qhpcEkGQvFU40rVYApJpi98DiZPYFdx1oBDp/f4uZ3ojpxRVFT +cDwcJLfNRCPUhormsY7fDS9xSyThiHsW9mjJYdcaKQkwYZ0F11yB +-----END CERTIFICATE----- \ No newline at end of file diff --git a/tests/oauth2/test_id_token.py b/tests/oauth2/test_id_token.py index 7d6a22481..ff3d4b6d8 100644 --- 
a/tests/oauth2/test_id_token.py +++ b/tests/oauth2/test_id_token.py @@ -20,6 +20,7 @@ from google.auth import environment_vars from google.auth import exceptions +from google.auth import impersonated_credentials from google.auth import transport from google.oauth2 import id_token from google.oauth2 import service_account @@ -27,6 +28,12 @@ SERVICE_ACCOUNT_FILE = os.path.join( os.path.dirname(__file__), "../data/service_account.json" ) + +IMPERSONATED_SERVICE_ACCOUNT_FILE = os.path.join( + os.path.dirname(__file__), + "../data/impersonated_service_account_authorized_user_source.json", +) + ID_TOKEN_AUDIENCE = "https://pubsub.googleapis.com" @@ -262,6 +269,14 @@ def test_fetch_id_token_credentials_from_explicit_cred_json_file(monkeypatch): assert cred._target_audience == ID_TOKEN_AUDIENCE +def test_fetch_id_token_credentials_from_impersonated_cred_json_file(monkeypatch): + monkeypatch.setenv(environment_vars.CREDENTIALS, IMPERSONATED_SERVICE_ACCOUNT_FILE) + + cred = id_token.fetch_id_token_credentials(ID_TOKEN_AUDIENCE) + assert isinstance(cred, impersonated_credentials.IDTokenCredentials) + assert cred._target_audience == ID_TOKEN_AUDIENCE + + def test_fetch_id_token_credentials_no_cred_exists(monkeypatch): monkeypatch.delenv(environment_vars.CREDENTIALS, raising=False) diff --git a/tests/oauth2/test_webauthn_handler.py b/tests/oauth2/test_webauthn_handler.py index 454e97cb6..9fba266da 100644 --- a/tests/oauth2/test_webauthn_handler.py +++ b/tests/oauth2/test_webauthn_handler.py @@ -118,7 +118,7 @@ def test_success_get_assertion(os_get_stub, subprocess_run_stub): "type": "get", "origin": "fake_origin", "requestData": { - "rpid": "fake_rpid", + "rpId": "fake_rpid", "challenge": "fake_challenge", "allowCredentials": [{"type": "public-key", "id": "fake_id_1"}], }, diff --git a/tests/oauth2/test_webauthn_types.py b/tests/oauth2/test_webauthn_types.py index 5231d2189..bafe5b050 100644 --- a/tests/oauth2/test_webauthn_types.py +++ b/tests/oauth2/test_webauthn_types.py @@ -82,7 +82,7 @@ def test_GetRequest(has_allow_credentials): "type": "get", "origin": "fake_origin", "requestData": { - "rpid": "fake_rpid", + "rpId": "fake_rpid", "timeout": 123, "challenge": "fake_challenge", "userVerification": "preferred", diff --git a/tests/test__helpers.py b/tests/test__helpers.py index c9a3847ac..a4337c016 100644 --- a/tests/test__helpers.py +++ b/tests/test__helpers.py @@ -13,12 +13,50 @@ # limitations under the License. import datetime +import json +import logging +from unittest import mock import urllib import pytest # type: ignore from google.auth import _helpers +# _MOCK_BASE_LOGGER_NAME is the base logger namespace used for testing. +_MOCK_BASE_LOGGER_NAME = "foogle" + +# _MOCK_CHILD_LOGGER_NAME is the child logger namespace used for testing. 
+_MOCK_CHILD_LOGGER_NAME = "foogle.bar" + + +@pytest.fixture +def logger(): + """Returns a child logger for testing.""" + logger = logging.getLogger(_MOCK_CHILD_LOGGER_NAME) + logger.level = logging.NOTSET + logger.handlers = [] + logger.propagate = True + return logger + + +@pytest.fixture +def base_logger(): + """Returns a base logger for testing.""" + logger = logging.getLogger(_MOCK_BASE_LOGGER_NAME) + logger.level = logging.NOTSET + logger.handlers = [] + logger.propagate = True + return logger + + +@pytest.fixture(autouse=True) +def reset_logging_initialized(): + """Resets the global _LOGGING_INITIALIZED variable before each test.""" + original_state = _helpers._LOGGING_INITIALIZED + _helpers._LOGGING_INITIALIZED = False + yield + _helpers._LOGGING_INITIALIZED = original_state + class SourceClass(object): def func(self): # pragma: NO COVER @@ -92,7 +130,7 @@ def test_to_bytes_with_bytes(): def test_to_bytes_with_unicode(): - value = u"string-val" + value = "string-val" encoded_value = b"string-val" assert _helpers.to_bytes(value) == encoded_value @@ -103,13 +141,13 @@ def test_to_bytes_with_nonstring_type(): def test_from_bytes_with_unicode(): - value = u"bytes-val" + value = "bytes-val" assert _helpers.from_bytes(value) == value def test_from_bytes_with_bytes(): value = b"string-val" - decoded_value = u"string-val" + decoded_value = "string-val" assert _helpers.from_bytes(value) == decoded_value @@ -194,3 +232,393 @@ def test_unpadded_urlsafe_b64encode(): for case, expected in cases: assert _helpers.unpadded_urlsafe_b64encode(case) == expected + + +def test_hash_sensitive_info_basic(): + test_data = { + "expires_in": 3599, + "access_token": "access-123", + "scope": "https://www.googleapis.com/auth/test-api", + "token_type": "Bearer", + } + hashed_data = _helpers._hash_sensitive_info(test_data) + assert hashed_data["expires_in"] == 3599 + assert hashed_data["scope"] == "https://www.googleapis.com/auth/test-api" + assert hashed_data["access_token"].startswith("hashed_access_token-") + assert hashed_data["token_type"] == "Bearer" + + +def test_hash_sensitive_info_multiple_sensitive(): + test_data = { + "access_token": "some_long_token", + "id_token": "1234-5678-9012-3456", + "expires_in": 3599, + "token_type": "Bearer", + } + hashed_data = _helpers._hash_sensitive_info(test_data) + assert hashed_data["expires_in"] == 3599 + assert hashed_data["token_type"] == "Bearer" + assert hashed_data["access_token"].startswith("hashed_access_token-") + assert hashed_data["id_token"].startswith("hashed_id_token-") + + +def test_hash_sensitive_info_none_value(): + test_data = {"username": "user3", "secret": None, "normal_data": "abc"} + hashed_data = _helpers._hash_sensitive_info(test_data) + assert hashed_data["secret"] is None + assert hashed_data["normal_data"] == "abc" + + +def test_hash_sensitive_info_non_string_value(): + test_data = {"username": "user4", "access_token": 12345, "normal_data": "def"} + hashed_data = _helpers._hash_sensitive_info(test_data) + assert hashed_data["access_token"].startswith("hashed_access_token-") + assert hashed_data["normal_data"] == "def" + + +def test_hash_sensitive_info_list_value(): + test_data = [ + {"name": "Alice", "access_token": "12345"}, + {"name": "Bob", "client_id": "1141"}, + ] + hashed_data = _helpers._hash_sensitive_info(test_data) + assert hashed_data[0]["access_token"].startswith("hashed_access_token-") + assert hashed_data[1]["client_id"].startswith("hashed_client_id-") + + +def test_hash_sensitive_info_nested_list_value(): + test_data =
[{"names": ["Alice", "Bob"], "tokens": [{"access_token": "1234"}]}] + hashed_data = _helpers._hash_sensitive_info(test_data) + assert hashed_data[0]["tokens"][0]["access_token"].startswith( + "hashed_access_token-" + ) + + +def test_hash_sensitive_info_int_value(): + test_data = 123 + hashed_data = _helpers._hash_sensitive_info(test_data) + assert hashed_data == "" + + +def test_hash_sensitive_info_bool_value(): + test_data = True + hashed_data = _helpers._hash_sensitive_info(test_data) + assert hashed_data == "" + + +def test_hash_sensitive_info_byte_value(): + test_data = b"1243" + hashed_data = _helpers._hash_sensitive_info(test_data) + assert hashed_data == "" + + +def test_hash_sensitive_info_empty_dict(): + test_data = {} + hashed_data = _helpers._hash_sensitive_info(test_data) + assert hashed_data == {} + + +def test_hash_value_consistent_hashing(): + value = "test_value" + field_name = "test_field" + hash1 = _helpers._hash_value(value, field_name) + hash2 = _helpers._hash_value(value, field_name) + assert hash1 == hash2 + + +def test_hash_value_different_hashing(): + value1 = "test_value1" + value2 = "test_value2" + field_name = "test_field" + hash1 = _helpers._hash_value(value1, field_name) + hash2 = _helpers._hash_value(value2, field_name) + assert hash1 != hash2 + + +def test_hash_value_none(): + assert _helpers._hash_value(None, "test") is None + + +def test_logger_configured_default(logger): + assert not _helpers._logger_configured(logger) + + +def test_logger_configured_with_handler(logger): + mock_handler = logging.NullHandler() + logger.addHandler(mock_handler) + assert _helpers._logger_configured(logger) + + # Cleanup + logger.removeHandler(mock_handler) + + +def test_logger_configured_with_custom_level(logger): + original_level = logger.level + logger.level = logging.INFO + assert _helpers._logger_configured(logger) + + # Cleanup + logging.level = original_level + + +def test_logger_configured_with_propagate(logger): + original_propagate = logger.propagate + logger.propagate = False + assert _helpers._logger_configured(logger) + + # Cleanup + logger.propagate = original_propagate + + +def test_is_logging_enabled_with_no_level_set(logger, base_logger): + with mock.patch("google.auth._helpers._BASE_LOGGER_NAME", "foogle"): + assert _helpers.is_logging_enabled(logger) is False + + +def test_is_logging_enabled_with_debug_disabled(caplog, logger, base_logger): + with mock.patch("google.auth._helpers._BASE_LOGGER_NAME", _MOCK_BASE_LOGGER_NAME): + caplog.set_level(logging.INFO, logger=_MOCK_CHILD_LOGGER_NAME) + assert _helpers.is_logging_enabled(logger) is False + + +def test_is_logging_enabled_with_debug_enabled(caplog, logger, base_logger): + with mock.patch("google.auth._helpers._BASE_LOGGER_NAME", _MOCK_BASE_LOGGER_NAME): + caplog.set_level(logging.DEBUG, logger=_MOCK_CHILD_LOGGER_NAME) + assert _helpers.is_logging_enabled(logger) + + +def test_is_logging_enabled_with_base_logger_configured_with_info( + caplog, logger, base_logger +): + with mock.patch("google.auth._helpers._BASE_LOGGER_NAME", _MOCK_BASE_LOGGER_NAME): + caplog.set_level(logging.INFO, logger=_MOCK_BASE_LOGGER_NAME) + + base_logger = logging.getLogger(_MOCK_BASE_LOGGER_NAME) + assert not _helpers.is_logging_enabled(base_logger) + assert not _helpers.is_logging_enabled(logger) + + +def test_is_logging_enabled_with_base_logger_configured_with_debug( + caplog, logger, base_logger +): + with mock.patch("google.auth._helpers._BASE_LOGGER_NAME", _MOCK_BASE_LOGGER_NAME): + caplog.set_level(logging.DEBUG, 
+        caplog.set_level(logging.DEBUG, logger=_MOCK_BASE_LOGGER_NAME)
+
+        assert _helpers.is_logging_enabled(base_logger)
+        assert _helpers.is_logging_enabled(logger)
+
+
+def test_is_logging_enabled_with_base_logger_info_child_logger_debug(
+    caplog, logger, base_logger
+):
+    with mock.patch("google.auth._helpers._BASE_LOGGER_NAME", _MOCK_BASE_LOGGER_NAME):
+        caplog.set_level(logging.INFO, logger=_MOCK_BASE_LOGGER_NAME)
+        caplog.set_level(logging.DEBUG, logger=_MOCK_CHILD_LOGGER_NAME)
+
+        assert not _helpers.is_logging_enabled(base_logger)
+        assert _helpers.is_logging_enabled(logger)
+
+
+def test_is_logging_enabled_with_base_logger_debug_child_logger_info(
+    caplog, logger, base_logger
+):
+    with mock.patch("google.auth._helpers._BASE_LOGGER_NAME", _MOCK_BASE_LOGGER_NAME):
+        caplog.set_level(logging.DEBUG, logger=_MOCK_BASE_LOGGER_NAME)
+        caplog.set_level(logging.INFO, logger=_MOCK_CHILD_LOGGER_NAME)
+
+        assert _helpers.is_logging_enabled(base_logger)
+        assert not _helpers.is_logging_enabled(logger)
+
+
+def test_request_log_debug_enabled(logger, caplog, base_logger):
+    caplog.set_level(logging.DEBUG, logger=_MOCK_CHILD_LOGGER_NAME)
+    _helpers.request_log(
+        logger,
+        "GET",
+        "http://example.com",
+        b'{"key": "value"}',
+        {"Authorization": "Bearer token"},
+    )
+    assert len(caplog.records) == 1
+    record = caplog.records[0]
+    assert record.message == "Making request..."
+    assert record.httpRequest == {
+        "method": "GET",
+        "url": "http://example.com",
+        "body": {"key": "value"},
+        "headers": {"Authorization": "Bearer token"},
+    }
+
+
+def test_request_log_plain_text_debug_enabled(logger, caplog, base_logger):
+    caplog.set_level(logging.DEBUG, logger=_MOCK_CHILD_LOGGER_NAME)
+    _helpers.request_log(
+        logger,
+        "GET",
+        "http://example.com",
+        b"This is plain text.",
+        {"Authorization": "Bearer token", "Content-Type": "text/plain"},
+    )
+    assert len(caplog.records) == 1
+    record = caplog.records[0]
+    assert record.message == "Making request..."
+    assert record.httpRequest == {
+        "method": "GET",
+        "url": "http://example.com",
+        "body": "<redacted>",
+        "headers": {"Authorization": "Bearer token", "Content-Type": "text/plain"},
+    }
+
+
+def test_request_log_debug_disabled(logger, caplog, base_logger):
+    caplog.set_level(logging.INFO, logger=_MOCK_CHILD_LOGGER_NAME)
+    _helpers.request_log(
+        logger,
+        "POST",
+        "https://api.example.com",
+        "data",
+        {"Content-Type": "application/json"},
+    )
+    assert "Making request: POST https://api.example.com" not in caplog.text
+
+
+def test_response_log_debug_enabled(logger, caplog, base_logger):
+    caplog.set_level(logging.DEBUG, logger=_MOCK_CHILD_LOGGER_NAME)
+    _helpers.response_log(logger, {"payload": None})
+    assert len(caplog.records) == 1
+    record = caplog.records[0]
+    assert record.message == "Response received..."
+    assert record.httpResponse == "<redacted>"
+
+
+def test_response_log_debug_disabled(logger, caplog):
+    caplog.set_level(logging.INFO, logger=_MOCK_CHILD_LOGGER_NAME)
+    _helpers.response_log(logger, "another_response")
+    assert "Response received..." not in caplog.text
+
+
+def test_response_log_base_logger_configured(logger, caplog, base_logger):
+    caplog.set_level(logging.DEBUG, logger=_MOCK_BASE_LOGGER_NAME)
+    _helpers.response_log(logger, "another_response")
+    assert "Response received..." in caplog.text
+
+
+def test_response_log_debug_enabled_response_list(logger, caplog, base_logger):
+    # NOTE: test the response log when response.json() returns a list as per
+    # https://requests.readthedocs.io/en/latest/api/#requests.Response.json.
+    class MockResponse:
+        def json(self):
+            return ["item1", "item2", "item3"]
+
+    response = MockResponse()
+    caplog.set_level(logging.DEBUG, logger=_MOCK_CHILD_LOGGER_NAME)
+    _helpers.response_log(logger, response)
+    assert len(caplog.records) == 1
+    record = caplog.records[0]
+    assert record.message == "Response received..."
+    assert record.httpResponse == ["<redacted>", "<redacted>", "<redacted>"]
+
+
+def test_parse_request_body_bytes_valid():
+    body = b"key1=value1&key2=value2"
+    expected = {"key1": "value1", "key2": "value2"}
+    assert (
+        _helpers._parse_request_body(
+            body, content_type="application/x-www-form-urlencoded"
+        )
+        == expected
+    )
+
+
+def test_parse_request_body_bytes_empty():
+    body = b""
+    assert _helpers._parse_request_body(body) == ""
+
+
+def test_parse_request_body_bytes_invalid_encoding():
+    body = b"\xff\xfe\xfd"  # Invalid UTF-8 sequence
+    assert _helpers._parse_request_body(body) is None
+
+
+def test_parse_request_body_bytes_malformed_query():
+    body = b"key1=value1&key2"  # missing equals
+    expected = {"key1": "value1"}
+    assert (
+        _helpers._parse_request_body(
+            body, content_type="application/x-www-form-urlencoded"
+        )
+        == expected
+    )
+
+
+def test_parse_request_body_none():
+    assert _helpers._parse_request_body(None) is None
+
+
+def test_parse_request_body_bytes_no_content_type():
+    body = b'{"key": "value"}'
+    expected = {"key": "value"}
+    assert _helpers._parse_request_body(body) == expected
+
+
+def test_parse_request_body_bytes_content_type_json():
+    body = b'{"key": "value"}'
+    expected = {"key": "value"}
+    assert (
+        _helpers._parse_request_body(body, content_type="application/json") == expected
+    )
+
+
+def test_parse_request_body_content_type_urlencoded():
+    body = b"key=value"
+    expected = {"key": "value"}
+    assert (
+        _helpers._parse_request_body(
+            body, content_type="application/x-www-form-urlencoded"
+        )
+        == expected
+    )
+
+
+def test_parse_request_body_bytes_content_type_text():
+    body = b"This is plain text."
+    expected = "This is plain text."
+    assert _helpers._parse_request_body(body, content_type="text/plain") == expected
+
+
+def test_parse_request_body_content_type_invalid():
+    body = b'{"key": "value"}'
+    assert _helpers._parse_request_body(body, content_type="invalid") is None
+
+
+def test_parse_request_body_other_type():
+    assert _helpers._parse_request_body(123) is None
+    assert _helpers._parse_request_body("string") is None
+
+
+def test_parse_response_json_valid():
+    class MockResponse:
+        def json(self):
+            return {"data": "test"}
+
+    response = MockResponse()
+    expected = {"data": "test"}
+    assert _helpers._parse_response(response) == expected
+
+
+def test_parse_response_json_invalid():
+    class MockResponse:
+        def json(self):
+            raise json.JSONDecodeError("msg", "doc", 0)
+
+    response = MockResponse()
+    assert _helpers._parse_response(response) is None
+
+
+def test_parse_response_no_json_method():
+    response = "plain text"
+    assert _helpers._parse_response(response) is None
+
+
+def test_parse_response_none():
+    assert _helpers._parse_response(None) is None
diff --git a/tests/test__oauth2client.py b/tests/test__oauth2client.py
index 9f0c192ae..2d4a809bb 100644
--- a/tests/test__oauth2client.py
+++ b/tests/test__oauth2client.py
@@ -116,6 +116,14 @@ def test__convert_appengine_app_assertion_credentials(
     app_identity, mock_oauth2client_gae_imports
 ):
+    # `oauth2client` requires `cgi` which was removed in Python 3.13
+    # See https://github.com/googleapis/oauth2client/blob/50d20532a748f18e53f7d24ccbe6647132c979a9/oauth2client/contrib/appengine.py#L20
+    # oauth2client is no longer being updated so this test must be skipped on newer Python Runtimes
+    if sys.version_info >= (3, 13):  # pragma: NO COVER
+        pytest.skip(
+            "Skipping test for Python 3.13+ due to oauth2client incompatibility."
+        )
+
     import oauth2client.contrib.appengine  # type: ignore

     service_account_id = "service_account_id"
@@ -165,6 +173,14 @@ def reset__oauth2client_module():
 def test_import_has_app_engine(
     mock_oauth2client_gae_imports, reset__oauth2client_module
 ):
+    # `oauth2client` requires `cgi` which was removed in Python 3.13
+    # See https://github.com/googleapis/oauth2client/blob/50d20532a748f18e53f7d24ccbe6647132c979a9/oauth2client/contrib/appengine.py#L20
+    # oauth2client is no longer being updated so this test must be skipped on newer Python Runtimes
+    if sys.version_info >= (3, 13):  # pragma: NO COVER
+        pytest.skip(
+            "Skipping test for Python 3.13+ due to oauth2client incompatibility."
+        )
+
     importlib.reload(_oauth2client)
     assert _oauth2client._HAS_APPENGINE
diff --git a/tests/test__service_account_info.py b/tests/test__service_account_info.py
index 4fa85a599..be2657074 100644
--- a/tests/test__service_account_info.py
+++ b/tests/test__service_account_info.py
@@ -53,7 +53,7 @@ def test_from_dict_bad_private_key():
     with pytest.raises(ValueError) as excinfo:
         _service_account_info.from_dict(info)

-    assert excinfo.match(r"key")
+    assert excinfo.match(r"(?i)(key|PEM)")


 def test_from_dict_bad_format():
diff --git a/tests/test_identity_pool.py b/tests/test_identity_pool.py
index b2b0063ec..41fd18892 100644
--- a/tests/test_identity_pool.py
+++ b/tests/test_identity_pool.py
@@ -12,6 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
+import base64
 import datetime
 import http.client as http_client
 import json
@@ -19,6 +20,7 @@
 import urllib

 import mock
+from OpenSSL import crypto
 import pytest  # type: ignore

 from google.auth import _helpers, external_account
@@ -48,6 +50,13 @@
 DATA_DIR = os.path.join(os.path.dirname(__file__), "data")
 SUBJECT_TOKEN_TEXT_FILE = os.path.join(DATA_DIR, "external_subject_token.txt")
 SUBJECT_TOKEN_JSON_FILE = os.path.join(DATA_DIR, "external_subject_token.json")
+TRUST_CHAIN_WITH_LEAF_FILE = os.path.join(DATA_DIR, "trust_chain_with_leaf.pem")
+TRUST_CHAIN_WITHOUT_LEAF_FILE = os.path.join(DATA_DIR, "trust_chain_without_leaf.pem")
+TRUST_CHAIN_WRONG_ORDER_FILE = os.path.join(DATA_DIR, "trust_chain_wrong_order.pem")
+CERT_FILE = os.path.join(DATA_DIR, "public_cert.pem")
+KEY_FILE = os.path.join(DATA_DIR, "privatekey.pem")
+OTHER_CERT_FILE = os.path.join(DATA_DIR, "other_cert.pem")
+
 SUBJECT_TOKEN_FIELD_NAME = "access_token"

 with open(SUBJECT_TOKEN_TEXT_FILE) as fh:
@@ -57,6 +66,20 @@
     JSON_FILE_CONTENT = json.load(fh)

 JSON_FILE_SUBJECT_TOKEN = JSON_FILE_CONTENT.get(SUBJECT_TOKEN_FIELD_NAME)

+with open(CERT_FILE, "rb") as f:
+    CERT_FILE_CONTENT = base64.b64encode(
+        crypto.dump_certificate(
+            crypto.FILETYPE_ASN1, crypto.load_certificate(crypto.FILETYPE_PEM, f.read())
+        )
+    ).decode("utf-8")
+
+with open(OTHER_CERT_FILE, "rb") as f:
+    OTHER_CERT_FILE_CONTENT = base64.b64encode(
+        crypto.dump_certificate(
+            crypto.FILETYPE_ASN1, crypto.load_certificate(crypto.FILETYPE_PEM, f.read())
+        )
+    ).decode("utf-8")
+
 TOKEN_URL = "https://sts.googleapis.com/v1/token"
 TOKEN_INFO_URL = "https://sts.googleapis.com/v1/introspect"
 SUBJECT_TOKEN_TYPE = "urn:ietf:params:oauth:token-type:jwt"
@@ -185,6 +208,24 @@ class TestCredentials(object):
     CREDENTIAL_SOURCE_CERTIFICATE_NOT_DEFAULT = {
         "certificate": {"certificate_config_location": "path/to/config"}
     }
+    CREDENTIAL_SOURCE_CERTIFICATE_TRUST_CHAIN_WITH_LEAF = {
+        "certificate": {
+            "use_default_certificate_config": "true",
+            "trust_chain_path": TRUST_CHAIN_WITH_LEAF_FILE,
+        }
+    }
+    CREDENTIAL_SOURCE_CERTIFICATE_TRUST_CHAIN_WITHOUT_LEAF = {
+        "certificate": {
+            "use_default_certificate_config": "true",
+            "trust_chain_path": TRUST_CHAIN_WITHOUT_LEAF_FILE,
+        }
+    }
+    CREDENTIAL_SOURCE_CERTIFICATE_TRUST_CHAIN_WRONG_ORDER = {
+        "certificate": {
+            "use_default_certificate_config": "true",
+            "trust_chain_path": TRUST_CHAIN_WRONG_ORDER_FILE,
+        }
+    }
     SUCCESS_RESPONSE = {
         "access_token": "ACCESS_TOKEN",
         "issued_token_type": "urn:ietf:params:oauth:token-type:access_token",
@@ -936,14 +977,126 @@ def test_retrieve_subject_token_json_file(self):

         assert subject_token == JSON_FILE_SUBJECT_TOKEN

-    def test_retrieve_subject_token_certificate(self):
+    @mock.patch(
+        "google.auth.transport._mtls_helper._get_workload_cert_and_key_paths",
+        return_value=(CERT_FILE, KEY_FILE),
+    )
+    def test_retrieve_subject_token_certificate_default(
+        self, mock_get_workload_cert_and_key_paths
+    ):
         credentials = self.make_credentials(
             credential_source=self.CREDENTIAL_SOURCE_CERTIFICATE
         )

         subject_token = credentials.retrieve_subject_token(None)

-        assert subject_token == ""
+        assert subject_token == json.dumps([CERT_FILE_CONTENT])
+
+    @mock.patch(
+        "google.auth.transport._mtls_helper._get_workload_cert_and_key_paths",
+        return_value=(CERT_FILE, KEY_FILE),
+    )
+    def test_retrieve_subject_token_certificate_non_default_path(
+        self, mock_get_workload_cert_and_key_paths
+    ):
+        credentials = self.make_credentials(
+            credential_source=self.CREDENTIAL_SOURCE_CERTIFICATE_NOT_DEFAULT
+        )
+
+        subject_token = credentials.retrieve_subject_token(None)
+
+        assert subject_token == json.dumps([CERT_FILE_CONTENT])
+
+    @mock.patch(
+        "google.auth.transport._mtls_helper._get_workload_cert_and_key_paths",
+        return_value=(CERT_FILE, KEY_FILE),
+    )
+    def test_retrieve_subject_token_certificate_trust_chain_with_leaf(
+        self, mock_get_workload_cert_and_key_paths
+    ):
+        credentials = self.make_credentials(
+            credential_source=self.CREDENTIAL_SOURCE_CERTIFICATE_TRUST_CHAIN_WITH_LEAF
+        )
+
+        subject_token = credentials.retrieve_subject_token(None)
+        assert subject_token == json.dumps([CERT_FILE_CONTENT, OTHER_CERT_FILE_CONTENT])
+
+    @mock.patch(
+        "google.auth.transport._mtls_helper._get_workload_cert_and_key_paths",
+        return_value=(CERT_FILE, KEY_FILE),
+    )
+    def test_retrieve_subject_token_certificate_trust_chain_without_leaf(
+        self, mock_get_workload_cert_and_key_paths
+    ):
+        credentials = self.make_credentials(
+            credential_source=self.CREDENTIAL_SOURCE_CERTIFICATE_TRUST_CHAIN_WITHOUT_LEAF
+        )
+
+        subject_token = credentials.retrieve_subject_token(None)
+        assert subject_token == json.dumps([CERT_FILE_CONTENT, OTHER_CERT_FILE_CONTENT])
+
+    @mock.patch(
+        "google.auth.transport._mtls_helper._get_workload_cert_and_key_paths",
+        return_value=(CERT_FILE, KEY_FILE),
+    )
+    def test_retrieve_subject_token_certificate_trust_chain_invalid_order(
+        self, mock_get_workload_cert_and_key_paths
+    ):
+
+        credentials = self.make_credentials(
+            credential_source=self.CREDENTIAL_SOURCE_CERTIFICATE_TRUST_CHAIN_WRONG_ORDER
+        )
+
+        with pytest.raises(exceptions.RefreshError) as excinfo:
+            credentials.retrieve_subject_token(None)
+
+        assert excinfo.match(
+            "The leaf certificate must be at the top of the trust chain file"
+        )
+
+    @mock.patch(
+        "google.auth.transport._mtls_helper._get_workload_cert_and_key_paths",
+        return_value=(CERT_FILE, KEY_FILE),
+    )
+    def test_retrieve_subject_token_certificate_trust_chain_file_does_not_exist(
+        self, mock_get_workload_cert_and_key_paths
+    ):
+
+        credentials = self.make_credentials(
+            credential_source={
+                "certificate": {
+                    "use_default_certificate_config": "true",
+                    "trust_chain_path": "fake.pem",
+                }
+            }
+        )
+
+        with pytest.raises(exceptions.RefreshError) as excinfo:
+            credentials.retrieve_subject_token(None)
+
+        assert excinfo.match("Trust chain file 'fake.pem' was not found.")
+
+    @mock.patch(
+        "google.auth.transport._mtls_helper._get_workload_cert_and_key_paths",
+        return_value=(CERT_FILE, KEY_FILE),
+    )
+    def test_retrieve_subject_token_certificate_invalid_trust_chain_file(
+        self, mock_get_workload_cert_and_key_paths
+    ):
+
+        credentials = self.make_credentials(
+            credential_source={
+                "certificate": {
+                    "use_default_certificate_config": "true",
+                    "trust_chain_path": SUBJECT_TOKEN_TEXT_FILE,
+                }
+            }
+        )
+
+        with pytest.raises(exceptions.RefreshError) as excinfo:
+            credentials.retrieve_subject_token(None)
+
+        assert excinfo.match("Error loading PEM certificates from the trust chain file")

     def test_retrieve_subject_token_json_file_invalid_field_name(self):
         credential_source = {
diff --git a/tests/test_impersonated_credentials.py b/tests/test_impersonated_credentials.py
index 8f6b22670..4aa357e3e 100644
--- a/tests/test_impersonated_credentials.py
+++ b/tests/test_impersonated_credentials.py
@@ -12,6 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
+import copy
 import datetime
 import http.client as http_client
 import json
@@ -35,6 +36,9 @@
     PRIVATE_KEY_BYTES = fh.read()

 SERVICE_ACCOUNT_JSON_FILE = os.path.join(DATA_DIR, "service_account.json")
+IMPERSONATED_SERVICE_ACCOUNT_AUTHORIZED_USER_SOURCE_FILE = os.path.join(
+    DATA_DIR, "impersonated_service_account_authorized_user_source.json"
+)

 ID_TOKEN_DATA = (
     "eyJhbGciOiJSUzI1NiIsImtpZCI6ImRmMzc1ODkwOGI3OTIyOTNhZDk3N2Ew"
@@ -49,6 +53,9 @@
 with open(SERVICE_ACCOUNT_JSON_FILE, "rb") as fh:
     SERVICE_ACCOUNT_INFO = json.load(fh)

+with open(IMPERSONATED_SERVICE_ACCOUNT_AUTHORIZED_USER_SOURCE_FILE, "rb") as fh:
+    IMPERSONATED_SERVICE_ACCOUNT_AUTHORIZED_USER_SOURCE_INFO = json.load(fh)
+
 SIGNER = crypt.RSASigner.from_string(PRIVATE_KEY_BYTES, "1")
 TOKEN_URI = "https://example.com/oauth2/token"
@@ -148,6 +155,38 @@ def make_credentials(
             iam_endpoint_override=iam_endpoint_override,
         )

+    def test_from_impersonated_service_account_info(self):
+        credentials = impersonated_credentials.Credentials.from_impersonated_service_account_info(
+            IMPERSONATED_SERVICE_ACCOUNT_AUTHORIZED_USER_SOURCE_INFO
+        )
+        assert isinstance(credentials, impersonated_credentials.Credentials)
+
+    def test_from_impersonated_service_account_info_with_invalid_source_credentials_type(
+        self
+    ):
+        info = copy.deepcopy(IMPERSONATED_SERVICE_ACCOUNT_AUTHORIZED_USER_SOURCE_INFO)
+        assert "source_credentials" in info
+        # Set the source_credentials to an invalid type
+        info["source_credentials"]["type"] = "invalid_type"
+        with pytest.raises(exceptions.DefaultCredentialsError) as excinfo:
+            impersonated_credentials.Credentials.from_impersonated_service_account_info(
+                info
+            )
+        assert excinfo.match(
+            "source credential of type {} is not supported".format("invalid_type")
+        )
+
+    def test_from_impersonated_service_account_info_with_invalid_impersonation_url(
+        self
+    ):
+        info = copy.deepcopy(IMPERSONATED_SERVICE_ACCOUNT_AUTHORIZED_USER_SOURCE_INFO)
+        info["service_account_impersonation_url"] = "invalid_url"
+        with pytest.raises(exceptions.DefaultCredentialsError) as excinfo:
+            impersonated_credentials.Credentials.from_impersonated_service_account_info(
+                info
+            )
+        assert excinfo.match(r"Cannot extract target principal from")
+
     def test_get_cred_info(self):
         credentials = self.make_credentials()
         assert not credentials.get_cred_info()
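
# The Python sketches below are appended examples, not part of the patch
# itself. This first one illustrates the construction path exercised by
# test_from_impersonated_service_account_info above: building impersonated
# credentials from an in-memory info dict. The dict mirrors the layout of
# tests/data/impersonated_service_account_authorized_user_source.json; every
# value here is an illustrative placeholder, not a real credential.
from google.auth import impersonated_credentials

info = {
    "type": "impersonated_service_account",
    "service_account_impersonation_url": (
        "https://iamcredentials.googleapis.com/v1/projects/-/serviceAccounts/"
        "sa@my-project.iam.gserviceaccount.com:generateAccessToken"
    ),
    "source_credentials": {
        "type": "authorized_user",
        "client_id": "placeholder-client-id",
        "client_secret": "placeholder-client-secret",
        "refresh_token": "placeholder-refresh-token",
    },
}

# Per the tests above, the classmethod extracts the target principal from the
# impersonation URL and raises DefaultCredentialsError for an unsupported
# source credential type or a URL it cannot parse.
credentials = (
    impersonated_credentials.Credentials.from_impersonated_service_account_info(info)
)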
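
# A second sketch, covering the ADC flow that
# test_fetch_id_token_credentials_from_impersonated_cred_json_file above
# verifies: when GOOGLE_APPLICATION_CREDENTIALS points at an impersonated
# service account file, fetch_id_token_credentials returns impersonated ID
# token credentials for the requested audience. The file path is a
# placeholder.
import os

from google.auth import impersonated_credentials
from google.oauth2 import id_token

os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = "impersonated_service_account.json"

cred = id_token.fetch_id_token_credentials("https://pubsub.googleapis.com")
assert isinstance(cred, impersonated_credentials.IDTokenCredentials)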
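
# A final sketch of the standard-library logging semantics behind the
# is_logging_enabled tests above: a child logger left at NOTSET inherits its
# effective level from the base logger it propagates to, so configuring the
# base logger at DEBUG enables debug logging for the whole namespace. The
# "foogle"/"foogle.bar" names are the tests' stand-ins, not the library's
# real base logger name.
import logging

base_logger = logging.getLogger("foogle")
child_logger = logging.getLogger("foogle.bar")

base_logger.addHandler(logging.StreamHandler())
base_logger.setLevel(logging.DEBUG)

# The child has no level of its own, so it resolves DEBUG from its parent.
assert child_logger.getEffectiveLevel() == logging.DEBUG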