diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml
index e2b39f94..9ee60f7e 100644
--- a/.github/.OwlBot.lock.yaml
+++ b/.github/.OwlBot.lock.yaml
@@ -1,3 +1,3 @@
 docker:
   image: gcr.io/repo-automation-bots/owlbot-python:latest
-  digest: sha256:99d90d097e4a4710cc8658ee0b5b963f4426d0e424819787c3ac1405c9a26719
+  digest: sha256:aea14a583128771ae8aefa364e1652f3c56070168ef31beb203534222d842b8b
diff --git a/.kokoro/docker/docs/Dockerfile b/.kokoro/docker/docs/Dockerfile
index 412b0b56..4e1b1fb8 100644
--- a/.kokoro/docker/docs/Dockerfile
+++ b/.kokoro/docker/docs/Dockerfile
@@ -40,6 +40,7 @@ RUN apt-get update \
     libssl-dev \
     libsqlite3-dev \
     portaudio19-dev \
+    python3-distutils \
     redis-server \
     software-properties-common \
     ssh \
@@ -59,40 +60,8 @@ RUN apt-get update \
   && rm -rf /var/lib/apt/lists/* \
   && rm -f /var/cache/apt/archives/*.deb
 
-
-COPY fetch_gpg_keys.sh /tmp
-# Install the desired versions of Python.
-RUN set -ex \
-    && export GNUPGHOME="$(mktemp -d)" \
-    && echo "disable-ipv6" >> "${GNUPGHOME}/dirmngr.conf" \
-    && /tmp/fetch_gpg_keys.sh \
-    && for PYTHON_VERSION in 3.7.8 3.8.5; do \
-        wget --no-check-certificate -O python-${PYTHON_VERSION}.tar.xz "https://www.python.org/ftp/python/${PYTHON_VERSION%%[a-z]*}/Python-$PYTHON_VERSION.tar.xz" \
-        && wget --no-check-certificate -O python-${PYTHON_VERSION}.tar.xz.asc "https://www.python.org/ftp/python/${PYTHON_VERSION%%[a-z]*}/Python-$PYTHON_VERSION.tar.xz.asc" \
-        && gpg --batch --verify python-${PYTHON_VERSION}.tar.xz.asc python-${PYTHON_VERSION}.tar.xz \
-        && rm -r python-${PYTHON_VERSION}.tar.xz.asc \
-        && mkdir -p /usr/src/python-${PYTHON_VERSION} \
-        && tar -xJC /usr/src/python-${PYTHON_VERSION} --strip-components=1 -f python-${PYTHON_VERSION}.tar.xz \
-        && rm python-${PYTHON_VERSION}.tar.xz \
-        && cd /usr/src/python-${PYTHON_VERSION} \
-        && ./configure \
-            --enable-shared \
-            # This works only on Python 2.7 and throws a warning on every other
-            # version, but seems otherwise harmless.
-            --enable-unicode=ucs4 \
-            --with-system-ffi \
-            --without-ensurepip \
-        && make -j$(nproc) \
-        && make install \
-        && ldconfig \
-    ; done \
-    && rm -rf "${GNUPGHOME}" \
-    && rm -rf /usr/src/python* \
-    && rm -rf ~/.cache/
-
 RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \
-    && python3.7 /tmp/get-pip.py \
     && python3.8 /tmp/get-pip.py \
     && rm /tmp/get-pip.py
 
-CMD ["python3.7"]
+CMD ["python3.8"]
diff --git a/.kokoro/docker/docs/fetch_gpg_keys.sh b/.kokoro/docker/docs/fetch_gpg_keys.sh
deleted file mode 100755
index d653dd86..00000000
--- a/.kokoro/docker/docs/fetch_gpg_keys.sh
+++ /dev/null
@@ -1,45 +0,0 @@
-#!/bin/bash
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# A script to fetch gpg keys with retry.
-# Avoid jinja parsing the file.
-#
-
-function retry {
-    if [[ "${#}" -le 1 ]]; then
-        echo "Usage: ${0} retry_count commands.."
-        exit 1
-    fi
-    local retries=${1}
-    local command="${@:2}"
-    until [[ "${retries}" -le 0 ]]; do
-        $command && return 0
-        if [[ $? -ne 0 ]]; then
-            echo "command failed, retrying"
-            ((retries--))
-        fi
-    done
-    return 1
-}
-
-# 3.6.9, 3.7.5 (Ned Deily)
-retry 3 gpg --keyserver ha.pool.sks-keyservers.net --recv-keys \
-    0D96DF4D4110E5C43FBFB17F2D347EA6AA65421D
-
-# 3.8.0 (Ɓukasz Langa)
-retry 3 gpg --keyserver ha.pool.sks-keyservers.net --recv-keys \
-    E3FF2839C048B25C084DEBE9B26995E310250568
-
-#
diff --git a/.kokoro/samples/python3.6/periodic-head.cfg b/.kokoro/samples/python3.6/periodic-head.cfg
index f9cfcd33..fc517b6c 100644
--- a/.kokoro/samples/python3.6/periodic-head.cfg
+++ b/.kokoro/samples/python3.6/periodic-head.cfg
@@ -7,5 +7,5 @@ env_vars: {
 
 env_vars: {
     key: "TRAMPOLINE_BUILD_FILE"
-    value: "github/python-pubsub/.kokoro/test-samples-against-head.sh"
+    value: "github/python-bigquery-datatransfer/.kokoro/test-samples-against-head.sh"
 }
diff --git a/.kokoro/samples/python3.7/periodic-head.cfg b/.kokoro/samples/python3.7/periodic-head.cfg
index f9cfcd33..fc517b6c 100644
--- a/.kokoro/samples/python3.7/periodic-head.cfg
+++ b/.kokoro/samples/python3.7/periodic-head.cfg
@@ -7,5 +7,5 @@ env_vars: {
 
 env_vars: {
     key: "TRAMPOLINE_BUILD_FILE"
-    value: "github/python-pubsub/.kokoro/test-samples-against-head.sh"
+    value: "github/python-bigquery-datatransfer/.kokoro/test-samples-against-head.sh"
 }
diff --git a/.kokoro/samples/python3.8/periodic-head.cfg b/.kokoro/samples/python3.8/periodic-head.cfg
index f9cfcd33..fc517b6c 100644
--- a/.kokoro/samples/python3.8/periodic-head.cfg
+++ b/.kokoro/samples/python3.8/periodic-head.cfg
@@ -7,5 +7,5 @@ env_vars: {
 
 env_vars: {
     key: "TRAMPOLINE_BUILD_FILE"
-    value: "github/python-pubsub/.kokoro/test-samples-against-head.sh"
+    value: "github/python-bigquery-datatransfer/.kokoro/test-samples-against-head.sh"
 }
diff --git a/.kokoro/samples/python3.9/periodic-head.cfg b/.kokoro/samples/python3.9/periodic-head.cfg
index f9cfcd33..fc517b6c 100644
--- a/.kokoro/samples/python3.9/periodic-head.cfg
+++ b/.kokoro/samples/python3.9/periodic-head.cfg
@@ -7,5 +7,5 @@ env_vars: {
 
 env_vars: {
     key: "TRAMPOLINE_BUILD_FILE"
-    value: "github/python-pubsub/.kokoro/test-samples-against-head.sh"
+    value: "github/python-bigquery-datatransfer/.kokoro/test-samples-against-head.sh"
 }
diff --git a/.kokoro/test-samples-impl.sh b/.kokoro/test-samples-impl.sh
index cf5de74c..311a8d54 100755
--- a/.kokoro/test-samples-impl.sh
+++ b/.kokoro/test-samples-impl.sh
@@ -20,9 +20,9 @@ set -eo pipefail
 # Enables `**` to include files nested inside sub-folders
 shopt -s globstar
 
-# Exit early if samples directory doesn't exist
-if [ ! -d "./samples" ]; then
-  echo "No tests run. `./samples` not found"
+# Exit early if samples don't exist
+if ! find samples -name 'requirements.txt' | grep -q .; then
+  echo "No tests run. './samples/**/requirements.txt' not found"
   exit 0
 fi
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 6af254d8..12773f6b 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,24 @@
 
 [1]: https://pypi.org/project/google-cloud-bigquery-datatransfer/#history
 
+### [3.3.1](https://www.github.com/googleapis/python-bigquery-datatransfer/compare/v3.3.0...v3.3.1) (2021-07-26)
+
+
+### Bug Fixes
+
+* **deps:** pin 'google-{api,cloud}-core', 'google-auth' to allow 2.x versions ([#180](https://www.github.com/googleapis/python-bigquery-datatransfer/issues/180)) ([b0e9533](https://www.github.com/googleapis/python-bigquery-datatransfer/commit/b0e953347b43dd2252cf907bfaa8b6fbb16379b6))
+* enable self signed jwt for grpc ([#185](https://www.github.com/googleapis/python-bigquery-datatransfer/issues/185)) ([3dd3bdd](https://www.github.com/googleapis/python-bigquery-datatransfer/commit/3dd3bdd9074297803eced776e0ab12491f91da3b))
+
+
+### Documentation
+
+* add Samples section to CONTRIBUTING.rst ([#181](https://www.github.com/googleapis/python-bigquery-datatransfer/issues/181)) ([5bc909c](https://www.github.com/googleapis/python-bigquery-datatransfer/commit/5bc909c606fdf7cc037e1f014a997272f6284c65))
+
+
+### Miscellaneous Chores
+
+* release as 3.3.1 ([#186](https://www.github.com/googleapis/python-bigquery-datatransfer/issues/186)) ([7c1a7b9](https://www.github.com/googleapis/python-bigquery-datatransfer/commit/7c1a7b98fcb08dbe081c8c645d079a6586fa7b7b))
+
 ## [3.3.0](https://www.github.com/googleapis/python-bigquery-datatransfer/compare/v3.2.0...v3.3.0) (2021-07-12)
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index a09a0802..26ea1fac 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -177,6 +177,30 @@ Build the docs via:
 
    $ nox -s docs
 
+*************************
+Samples and code snippets
+*************************
+
+Code samples and snippets live in the `samples/` catalogue. Feel free to
+provide more examples, but make sure to write tests for those examples.
+Each folder containing example code requires its own `noxfile.py` script
+which automates testing. If you decide to create a new folder, you can
+base it on the `samples/snippets` folder (providing `noxfile.py` and
+the requirements files).
+
+The tests will run against a real Google Cloud Project, so you should
+configure them just like the System Tests.
+
+- To run sample tests, you can execute::
+
+   # Run all tests in a folder
+   $ cd samples/snippets
+   $ nox -s py-3.8
+
+   # Run a single sample test
+   $ cd samples/snippets
+   $ nox -s py-3.8 -- -k <name of test>
+
 ********************************************
 Note About ``README`` as it pertains to PyPI
 ********************************************
diff --git a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py
index 099e611b..637b0cdf 100644
--- a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py
+++ b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py
@@ -388,6 +388,10 @@ def __init__(
             client_cert_source_for_mtls=client_cert_source_func,
             quota_project_id=client_options.quota_project_id,
             client_info=client_info,
+            always_use_jwt_access=(
+                Transport == type(self).get_transport_class("grpc")
+                or Transport == type(self).get_transport_class("grpc_asyncio")
+            ),
         )
 
     def get_data_source(
diff --git a/samples/snippets/manage_transfer_configs.py b/samples/snippets/manage_transfer_configs.py
index 6b4abd78..5f775f10 100644
--- a/samples/snippets/manage_transfer_configs.py
+++ b/samples/snippets/manage_transfer_configs.py
@@ -135,9 +135,7 @@ def schedule_backfill(override_values={}):
     )
 
     response = transfer_client.schedule_transfer_runs(
-        parent=transfer_config_name,
-        start_time=start_time,
-        end_time=end_time,
+        parent=transfer_config_name, start_time=start_time, end_time=end_time,
     )
 
     print("Started transfer runs:")
diff --git a/samples/snippets/noxfile.py b/samples/snippets/noxfile.py
index b3c8658a..9fc7f178 100644
--- a/samples/snippets/noxfile.py
+++ b/samples/snippets/noxfile.py
@@ -28,8 +28,9 @@
 # WARNING - WARNING - WARNING - WARNING - WARNING
 # WARNING - WARNING - WARNING - WARNING - WARNING
 
-# Copy `noxfile_config.py` to your directory and modify it instead.
+BLACK_VERSION = "black==19.10b0"
 
+# Copy `noxfile_config.py` to your directory and modify it instead.
 # `TEST_CONFIG` dict is a configuration hook that allows users to
 # modify the test configurations. The values here should be in sync
@@ -159,7 +160,7 @@ def lint(session: nox.sessions.Session) -> None:
 
 @nox.session
 def blacken(session: nox.sessions.Session) -> None:
-    session.install("black")
+    session.install(BLACK_VERSION)
     python_files = [path for path in os.listdir(".") if path.endswith(".py")]
 
     session.run("black", *python_files)
@@ -226,7 +227,7 @@ def py(session: nox.sessions.Session) -> None:
 
 
 def _get_repo_root() -> Optional[str]:
-    """Returns the root folder of the project."""
+    """ Returns the root folder of the project. """
     # Get root of this repository. Assume we don't have directories nested deeper than 10 items.
     p = Path(os.getcwd())
     for i in range(10):
diff --git a/samples/snippets/requirements-test.txt b/samples/snippets/requirements-test.txt
index e763d1e2..1f919865 100644
--- a/samples/snippets/requirements-test.txt
+++ b/samples/snippets/requirements-test.txt
@@ -1,4 +1,4 @@
-google-cloud-bigquery==2.20.0
+google-cloud-bigquery==2.22.1
 google-cloud-pubsub==2.6.1
 pytest==6.2.4
 mock==4.0.3
diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt
index c3cb098d..c1d4d391 100644
--- a/samples/snippets/requirements.txt
+++ b/samples/snippets/requirements.txt
@@ -1 +1 @@
-google-cloud-bigquery-datatransfer==3.2.0
+google-cloud-bigquery-datatransfer==3.3.0
diff --git a/samples/snippets/run_notification_test.py b/samples/snippets/run_notification_test.py
index 02f24266..4c41e689 100644
--- a/samples/snippets/run_notification_test.py
+++ b/samples/snippets/run_notification_test.py
@@ -17,8 +17,7 @@ def test_run_notification(capsys, transfer_config_name, pubsub_topic):
     run_notification.run_notification(
-        transfer_config_name=transfer_config_name,
-        pubsub_topic=pubsub_topic,
+        transfer_config_name=transfer_config_name, pubsub_topic=pubsub_topic,
     )
 
     out, _ = capsys.readouterr()
     assert "Updated config:" in out
diff --git a/setup.py b/setup.py
index 32f5cfb3..d75c0a36 100644
--- a/setup.py
+++ b/setup.py
@@ -22,14 +22,17 @@
 
 name = "google-cloud-bigquery-datatransfer"
 description = "BigQuery Data Transfer API client library"
-version = "3.3.0"
+version = "3.3.1"
 # Should be one of:
 # 'Development Status :: 3 - Alpha'
 # 'Development Status :: 4 - Beta'
 # 'Development Status :: 5 - Production/Stable'
 release_status = "Development Status :: 5 - Production/Stable"
 dependencies = (
-    "google-api-core[grpc] >= 1.26.0, <2.0.0dev",
+    # NOTE: Maintainers, please do not require google-api-core>=2.x.x
+    # Until this issue is closed
+    # https://github.com/googleapis/google-cloud-python/issues/10566
+    "google-api-core[grpc] >= 1.26.0, <3.0.0dev",
     "proto-plus >= 1.15.0",
     "packaging >= 14.3",
 )
diff --git a/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py b/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py
index be3b2c25..6328d1f8 100644
--- a/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py
+++ b/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py
@@ -131,18 +131,6 @@ def test_data_transfer_service_client_from_service_account_info(client_class):
     assert client.transport._host == "bigquerydatatransfer.googleapis.com:443"
 
 
-@pytest.mark.parametrize(
-    "client_class", [DataTransferServiceClient, DataTransferServiceAsyncClient,]
-)
-def test_data_transfer_service_client_service_account_always_use_jwt(client_class):
-    with mock.patch.object(
-        service_account.Credentials, "with_always_use_jwt_access", create=True
-    ) as use_jwt:
-        creds = service_account.Credentials(None, None, None)
-        client = client_class(credentials=creds)
-        use_jwt.assert_not_called()
-
-
 @pytest.mark.parametrize(
     "transport_class,transport_name",
     [
@@ -150,7 +138,7 @@ def test_data_transfer_service_client_service_account_always_use_jwt(client_class):
         (transports.DataTransferServiceGrpcAsyncIOTransport, "grpc_asyncio"),
     ],
 )
-def test_data_transfer_service_client_service_account_always_use_jwt_true(
+def test_data_transfer_service_client_service_account_always_use_jwt(
     transport_class, transport_name
 ):
     with mock.patch.object(
@@ -160,6 +148,13 @@ def test_data_transfer_service_client_service_account_always_use_jwt_true(
         transport = transport_class(credentials=creds, always_use_jwt_access=True)
         use_jwt.assert_called_once_with(True)
 
+    with mock.patch.object(
+        service_account.Credentials, "with_always_use_jwt_access", create=True
+    ) as use_jwt:
+        creds = service_account.Credentials(None, None, None)
+        transport = transport_class(credentials=creds, always_use_jwt_access=False)
+        use_jwt.assert_not_called()
+
 
 @pytest.mark.parametrize(
     "client_class", [DataTransferServiceClient, DataTransferServiceAsyncClient,]
@@ -244,6 +239,7 @@ def test_data_transfer_service_client_client_options(
         client_cert_source_for_mtls=None,
         quota_project_id=None,
         client_info=transports.base.DEFAULT_CLIENT_INFO,
+        always_use_jwt_access=True,
     )
 
     # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
@@ -260,6 +256,7 @@
         client_cert_source_for_mtls=None,
         quota_project_id=None,
         client_info=transports.base.DEFAULT_CLIENT_INFO,
+        always_use_jwt_access=True,
     )
 
     # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
@@ -276,6 +273,7 @@
         client_cert_source_for_mtls=None,
         quota_project_id=None,
         client_info=transports.base.DEFAULT_CLIENT_INFO,
+        always_use_jwt_access=True,
    )
 
     # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
@@ -304,6 +302,7 @@
         client_cert_source_for_mtls=None,
         quota_project_id="octopus",
         client_info=transports.base.DEFAULT_CLIENT_INFO,
+        always_use_jwt_access=True,
     )
 
 
@@ -380,6 +379,7 @@ def test_data_transfer_service_client_mtls_env_auto(
             client_cert_source_for_mtls=expected_client_cert_source,
             quota_project_id=None,
             client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
         )
 
     # Check the case ADC client cert is provided. Whether client cert is used depends on
@@ -413,6 +413,7 @@ def test_data_transfer_service_client_mtls_env_auto(
             client_cert_source_for_mtls=expected_client_cert_source,
             quota_project_id=None,
             client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
         )
 
     # Check the case client_cert_source and ADC client cert are not provided.
@@ -434,6 +435,7 @@
             client_cert_source_for_mtls=None,
             quota_project_id=None,
             client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
         )
 
 
@@ -468,6 +470,7 @@ def test_data_transfer_service_client_client_options_scopes(
         client_cert_source_for_mtls=None,
         quota_project_id=None,
         client_info=transports.base.DEFAULT_CLIENT_INFO,
+        always_use_jwt_access=True,
     )
 
 
@@ -502,6 +505,7 @@ def test_data_transfer_service_client_client_options_credentials_file(
         client_cert_source_for_mtls=None,
         quota_project_id=None,
         client_info=transports.base.DEFAULT_CLIENT_INFO,
+        always_use_jwt_access=True,
    )
 
 
@@ -521,6 +525,7 @@ def test_data_transfer_service_client_client_options_from_dict():
         client_cert_source_for_mtls=None,
         quota_project_id=None,
         client_info=transports.base.DEFAULT_CLIENT_INFO,
+        always_use_jwt_access=True,
     )