diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml
index 9bdafee7..0954585f 100644
--- a/.github/.OwlBot.lock.yaml
+++ b/.github/.OwlBot.lock.yaml
@@ -1,4 +1,3 @@
docker:
- digest: sha256:612842ba5ccf62b4e3983fe6dc453cf66883c74bc168aa62da7acaed1e2fdc93
- image: gcr.io/repo-automation-bots/owlbot-python:latest
-
+ image: gcr.io/repo-automation-bots/owlbot-python:latest
+ digest: sha256:df50e8d462f86d6bcb42f27ecad55bb12c404f1c65de9c6fe4c4d25120080bd6
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index 4558c4cd..ae570eb0 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -5,7 +5,7 @@
# https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax
# The @googleapis/api-bigquery is the default owner for changes in this repo
-* @googleapis/api-bigquery
+* @googleapis/api-bigquery @googleapis/yoshi-python
# The python-samples-reviewers team is the default owner for samples changes
/samples/ @googleapis/python-samples-owners
diff --git a/.github/header-checker-lint.yml b/.github/header-checker-lint.yml
index fc281c05..6fe78aa7 100644
--- a/.github/header-checker-lint.yml
+++ b/.github/header-checker-lint.yml
@@ -1,6 +1,6 @@
{"allowedCopyrightHolders": ["Google LLC"],
"allowedLicenses": ["Apache-2.0", "MIT", "BSD-3"],
- "ignoreFiles": ["**/requirements.txt", "**/requirements-test.txt"],
+ "ignoreFiles": ["**/requirements.txt", "**/requirements-test.txt", "**/__init__.py", "samples/**/constraints.txt", "samples/**/constraints-test.txt"],
"sourceFileExtensions": [
"ts",
"js",
diff --git a/.kokoro/release.sh b/.kokoro/release.sh
index 76c8ed2f..064ab4b9 100755
--- a/.kokoro/release.sh
+++ b/.kokoro/release.sh
@@ -26,7 +26,7 @@ python3 -m pip install --upgrade twine wheel setuptools
export PYTHONUNBUFFERED=1
# Move into the package, build the distribution and upload.
-TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google_cloud_pypi_password")
+TWINE_PASSWORD=$(cat "${KOKORO_GFILE_DIR}/secret_manager/google-cloud-pypi-token")
cd github/python-bigquery-datatransfer
python3 setup.py sdist bdist_wheel
-twine upload --username gcloudpypi --password "${TWINE_PASSWORD}" dist/*
+twine upload --username __token__ --password "${TWINE_PASSWORD}" dist/*
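Note for reviewers: the username changes to the literal `__token__`, which is how PyPI API-token authentication works; the secret itself now comes from Secret Manager via `KOKORO_GFILE_DIR` rather than the keystore. A minimal Python sketch of the same upload step (assuming those paths exist; the real script stays in bash):

```python
# Illustrative Python rendering of the release.sh upload step above.
# Assumes KOKORO_GFILE_DIR is set and the secret file exists at this path.
import glob
import os
import pathlib
import subprocess

token = (
    pathlib.Path(os.environ["KOKORO_GFILE_DIR"])
    / "secret_manager" / "google-cloud-pypi-token"
).read_text().strip()

# PyPI API tokens always pair with the fixed username "__token__".
subprocess.run(
    ["twine", "upload", "--username", "__token__", "--password", token,
     *glob.glob("dist/*")],
    check=True,
)
```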
diff --git a/.kokoro/release/common.cfg b/.kokoro/release/common.cfg
index 437024a8..ec1f2210 100644
--- a/.kokoro/release/common.cfg
+++ b/.kokoro/release/common.cfg
@@ -23,18 +23,8 @@ env_vars: {
value: "github/python-bigquery-datatransfer/.kokoro/release.sh"
}
-# Fetch PyPI password
-before_action {
- fetch_keystore {
- keystore_resource {
- keystore_config_id: 73713
- keyname: "google_cloud_pypi_password"
- }
- }
-}
-
# Tokens needed to report release status back to GitHub
env_vars: {
key: "SECRET_MANAGER_KEYS"
- value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem"
-}
\ No newline at end of file
+ value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem,google-cloud-pypi-token"
+}
diff --git a/.kokoro/samples/python3.9/common.cfg b/.kokoro/samples/python3.9/common.cfg
new file mode 100644
index 00000000..acf30226
--- /dev/null
+++ b/.kokoro/samples/python3.9/common.cfg
@@ -0,0 +1,40 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Specify which tests to run
+env_vars: {
+ key: "RUN_TESTS_SESSION"
+ value: "py-3.9"
+}
+
+# Declare build specific Cloud project.
+env_vars: {
+ key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+ value: "python-docs-samples-tests-py39"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-bigquery-datatransfer/.kokoro/test-samples.sh"
+}
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
+}
+
+# Download secrets for samples
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-bigquery-datatransfer/.kokoro/trampoline.sh"
\ No newline at end of file
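The `RUN_TESTS_SESSION` value is what the samples harness uses to choose a nox session. A hypothetical sketch of that plumbing (the actual logic lives in `.kokoro/test-samples.sh`, which this config points at):

```python
# Hypothetical sketch only: map RUN_TESTS_SESSION ("py-3.9") to a nox session.
import os
import subprocess

session = os.environ.get("RUN_TESTS_SESSION", "py-3.9")
subprocess.run(["nox", "-s", session], check=True)
```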
diff --git a/.kokoro/samples/python3.9/continuous.cfg b/.kokoro/samples/python3.9/continuous.cfg
new file mode 100644
index 00000000..a1c8d975
--- /dev/null
+++ b/.kokoro/samples/python3.9/continuous.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.9/periodic-head.cfg b/.kokoro/samples/python3.9/periodic-head.cfg
new file mode 100644
index 00000000..f9cfcd33
--- /dev/null
+++ b/.kokoro/samples/python3.9/periodic-head.cfg
@@ -0,0 +1,11 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-pubsub/.kokoro/test-samples-against-head.sh"
+}
diff --git a/.kokoro/samples/python3.9/periodic.cfg b/.kokoro/samples/python3.9/periodic.cfg
new file mode 100644
index 00000000..50fec964
--- /dev/null
+++ b/.kokoro/samples/python3.9/periodic.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "False"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.9/presubmit.cfg b/.kokoro/samples/python3.9/presubmit.cfg
new file mode 100644
index 00000000..a1c8d975
--- /dev/null
+++ b/.kokoro/samples/python3.9/presubmit.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
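Across the new python3.9 configs, continuous and presubmit builds set `INSTALL_LIBRARY_FROM_SOURCE=True` while periodic builds set `False`, so nightlies exercise the released package instead of the checkout. A hypothetical sketch of how a harness might honor the flag:

```python
# Hypothetical: install from the local checkout or from PyPI depending on
# INSTALL_LIBRARY_FROM_SOURCE, which the configs above toggle.
import os
import subprocess

if os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", "False") == "True":
    subprocess.run(["pip", "install", "-e", "."], check=True)  # source checkout
else:
    subprocess.run(
        ["pip", "install", "google-cloud-bigquery-datatransfer"], check=True
    )  # released package
```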
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 32302e48..62eb5a77 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,8 +1,22 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
# See https://pre-commit.com for more information
# See https://pre-commit.com/hooks.html for more hooks
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
- rev: v3.4.0
+ rev: v4.0.1
hooks:
- id: trailing-whitespace
- id: end-of-file-fixer
@@ -12,6 +26,6 @@ repos:
hooks:
- id: black
- repo: https://gitlab.com/pycqa/flake8
- rev: 3.9.0
+ rev: 3.9.2
hooks:
- id: flake8
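To exercise the bumped hook versions locally (assuming `pre-commit` is installed):

```python
# Re-run every hook against the whole tree after updating
# .pre-commit-config.yaml.
import subprocess

subprocess.run(["pre-commit", "run", "--all-files"], check=True)
```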
diff --git a/.repo-metadata.json b/.repo-metadata.json
index 5da24c19..9d9df181 100644
--- a/.repo-metadata.json
+++ b/.repo-metadata.json
@@ -6,6 +6,7 @@
"issue_tracker": "https://issuetracker.google.com/savedsearches/559654",
"release_level": "ga",
"language": "python",
+ "library_type": "GAPIC_AUTO",
"repo": "googleapis/python-bigquery-datatransfer",
"distribution_name": "google-cloud-bigquery-datatransfer",
"api_id": "bigquerydatatransfer.googleapis.com",
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 460c160d..ce1b11ae 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,23 @@
[1]: https://pypi.org/project/google-cloud-bigquery-datatransfer/#history
+## [3.2.0](https://www.github.com/googleapis/python-bigquery-datatransfer/compare/v3.1.1...v3.2.0) (2021-06-22)
+
+
+### Features
+
+* support self-signed JWT flow for service accounts ([046c368](https://www.github.com/googleapis/python-bigquery-datatransfer/commit/046c368cf5a75a210b0ecc7e6e1eee9bcd907669))
+
+
+### Bug Fixes
+
+* add async client to %name_%version/init.py ([046c368](https://www.github.com/googleapis/python-bigquery-datatransfer/commit/046c368cf5a75a210b0ecc7e6e1eee9bcd907669))
+
+
+### Documentation
+
+* omit mention of Python 2.7 in 'CONTRIBUTING.rst' ([#1127](https://www.github.com/googleapis/python-bigquery-datatransfer/issues/1127)) ([#164](https://www.github.com/googleapis/python-bigquery-datatransfer/issues/164)) ([2741e4f](https://www.github.com/googleapis/python-bigquery-datatransfer/commit/2741e4fb1d9074494872fafcec96d870b14b671d)), closes [#1126](https://www.github.com/googleapis/python-bigquery-datatransfer/issues/1126)
+
### [3.1.1](https://www.github.com/googleapis/python-bigquery-datatransfer/compare/v3.1.0...v3.1.1) (2021-04-07)
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index 88e59522..5165217c 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -69,7 +69,6 @@ We use `nox `__ to instrument our tests.
- To test your changes, run unit tests with ``nox``::
- $ nox -s unit-2.7
$ nox -s unit-3.8
$ ...
@@ -144,7 +143,6 @@ Running System Tests
# Run all system tests
$ nox -s system-3.8
- $ nox -s system-2.7
# Run a single system test
$ nox -s system-3.8 -- -k <name of test>
@@ -152,29 +150,14 @@ Running System Tests
.. note::
- System tests are only configured to run under Python 2.7 and
- Python 3.8. For expediency, we do not run them in older versions
- of Python 3.
+ System tests are only configured to run under Python 3.8.
+ For expediency, we do not run them in older versions of Python 3.
This alone will not run the tests. You'll need to change some local
auth settings and change some configuration in your project to
run all the tests.
-- System tests will be run against an actual project and
- so you'll need to provide some environment variables to facilitate
- authentication to your project:
-
- - ``GOOGLE_APPLICATION_CREDENTIALS``: The path to a JSON key file;
- Such a file can be downloaded directly from the developer's console by clicking
- "Generate new JSON key". See private key
- `docs `__
- for more details.
-
-- Once you have downloaded your json keys, set the environment variable
- ``GOOGLE_APPLICATION_CREDENTIALS`` to the absolute path of the json file::
-
- $ export GOOGLE_APPLICATION_CREDENTIALS="/Users/<your_username>/path/to/app_credentials.json"
-
+- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication `__. Some tests require a service account. For those tests see `Authenticating as a service account `__.
*************
Test Coverage
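With the key-file instructions gone, system tests lean on Application Default Credentials: after `gcloud auth application-default login`, client construction needs no explicit key file. A minimal sketch:

```python
# Minimal sketch: with gcloud-provided Application Default Credentials,
# no GOOGLE_APPLICATION_CREDENTIALS key file is required.
from google.cloud import bigquery_datatransfer

client = bigquery_datatransfer.DataTransferServiceClient()
```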
diff --git a/SECURITY.md b/SECURITY.md
new file mode 100644
index 00000000..8b58ae9c
--- /dev/null
+++ b/SECURITY.md
@@ -0,0 +1,7 @@
+# Security Policy
+
+To report a security issue, please use [g.co/vulnz](https://g.co/vulnz).
+
+The Google Security Team will respond within 5 working days of your report on g.co/vulnz.
+
+We use g.co/vulnz for intake, and we coordinate and disclose here using GitHub Security Advisories to privately discuss and fix the issue.
diff --git a/docs/_static/custom.css b/docs/_static/custom.css
index bcd37bbd..b0a29546 100644
--- a/docs/_static/custom.css
+++ b/docs/_static/custom.css
@@ -1,9 +1,20 @@
div#python2-eol {
border-color: red;
border-width: medium;
-}
+}
/* Ensure minimum width for 'Parameters' / 'Returns' column */
dl.field-list > dt {
min-width: 100px
}
+
+/* Insert space between methods for readability */
+dl.method {
+ padding-top: 10px;
+ padding-bottom: 10px
+}
+
+/* Insert empty space between classes */
+dl.class {
+ padding-bottom: 50px
+}
diff --git a/docs/bigquery_datatransfer_v1/data_transfer_service.rst b/docs/bigquery_datatransfer_v1/data_transfer_service.rst
index 58f85396..480f43ed 100644
--- a/docs/bigquery_datatransfer_v1/data_transfer_service.rst
+++ b/docs/bigquery_datatransfer_v1/data_transfer_service.rst
@@ -5,7 +5,6 @@ DataTransferService
:members:
:inherited-members:
-
.. automodule:: google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.pagers
:members:
:inherited-members:
diff --git a/docs/conf.py b/docs/conf.py
index 8e9ba316..96f393bf 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -1,4 +1,17 @@
# -*- coding: utf-8 -*-
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
#
# google-cloud-bigquery-datatransfer documentation build configuration file
#
@@ -67,9 +80,9 @@
master_doc = "index"
# General information about the project.
-project = u"google-cloud-bigquery-datatransfer"
-copyright = u"2019, Google"
-author = u"Google APIs"
+project = "google-cloud-bigquery-datatransfer"
+copyright = "2019, Google"
+author = "Google APIs"
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
@@ -268,7 +281,7 @@
(
master_doc,
"google-cloud-bigquery-datatransfer.tex",
- u"google-cloud-bigquery-datatransfer Documentation",
+ "google-cloud-bigquery-datatransfer Documentation",
author,
"manual",
)
@@ -303,7 +316,7 @@
(
master_doc,
"google-cloud-bigquery-datatransfer",
- u"google-cloud-bigquery-datatransfer Documentation",
+ "google-cloud-bigquery-datatransfer Documentation",
[author],
1,
)
@@ -322,7 +335,7 @@
(
master_doc,
"google-cloud-bigquery-datatransfer",
- u"google-cloud-bigquery-datatransfer Documentation",
+ "google-cloud-bigquery-datatransfer Documentation",
author,
"google-cloud-bigquery-datatransfer",
"google-cloud-bigquery-datatransfer Library",
@@ -350,6 +363,7 @@
"google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,),
"grpc": ("https://grpc.github.io/grpc/python/", None),
"proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None),
+ "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None),
}
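The new `protobuf` entry lets Sphinx resolve cross-references into the hosted protobuf docs. A minimal `conf.py` fragment showing the mechanism (mapping key and URL as in the diff above):

```python
# Minimal Sphinx conf.py fragment: with this mapping, a reference such as
# :class:`google.protobuf.timestamp_pb2.Timestamp` resolves to the hosted
# protobuf documentation instead of failing.
extensions = ["sphinx.ext.intersphinx"]
intersphinx_mapping = {
    "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None),
}
```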
diff --git a/docs/multiprocessing.rst b/docs/multiprocessing.rst
index 1cb29d4c..536d17b2 100644
--- a/docs/multiprocessing.rst
+++ b/docs/multiprocessing.rst
@@ -1,7 +1,7 @@
.. note::
- Because this client uses :mod:`grpcio` library, it is safe to
+ Because this client uses the :mod:`grpc` library, it is safe to
share instances across threads. In multiprocessing scenarios, the best
practice is to create client instances *after* the invocation of
- :func:`os.fork` by :class:`multiprocessing.Pool` or
+ :func:`os.fork` by :class:`multiprocessing.pool.Pool` or
:class:`multiprocessing.Process`.
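The corrected references describe the usual gRPC fork-safety pattern: construct clients inside worker processes, after the fork. A short sketch (the project and locations are placeholders):

```python
# Sketch of the pattern the note prescribes: create the client inside each
# worker, i.e. after multiprocessing.Pool has forked, never in the parent.
import multiprocessing

from google.cloud import bigquery_datatransfer


def count_configs(parent: str) -> int:
    client = bigquery_datatransfer.DataTransferServiceClient()  # post-fork
    return sum(1 for _ in client.list_transfer_configs(parent=parent))


if __name__ == "__main__":
    parents = [  # hypothetical project/locations
        "projects/my-project/locations/us",
        "projects/my-project/locations/eu",
    ]
    with multiprocessing.Pool(processes=2) as pool:
        print(pool.map(count_configs, parents))
```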
diff --git a/google/cloud/bigquery_datatransfer/__init__.py b/google/cloud/bigquery_datatransfer/__init__.py
index 721bb151..bcde8ce8 100644
--- a/google/cloud/bigquery_datatransfer/__init__.py
+++ b/google/cloud/bigquery_datatransfer/__init__.py
@@ -1,5 +1,4 @@
# -*- coding: utf-8 -*-
-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -15,12 +14,13 @@
# limitations under the License.
#
-from google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.async_client import (
- DataTransferServiceAsyncClient,
-)
from google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.client import (
DataTransferServiceClient,
)
+from google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.async_client import (
+ DataTransferServiceAsyncClient,
+)
+
from google.cloud.bigquery_datatransfer_v1.types.datatransfer import (
CheckValidCredsRequest,
)
@@ -95,16 +95,15 @@
from google.cloud.bigquery_datatransfer_v1.types.transfer import TransferType
__all__ = (
+ "DataTransferServiceClient",
+ "DataTransferServiceAsyncClient",
"CheckValidCredsRequest",
"CheckValidCredsResponse",
"CreateTransferConfigRequest",
"DataSource",
"DataSourceParameter",
- "DataTransferServiceAsyncClient",
- "DataTransferServiceClient",
"DeleteTransferConfigRequest",
"DeleteTransferRunRequest",
- "EmailPreferences",
"GetDataSourceRequest",
"GetTransferConfigRequest",
"GetTransferRunRequest",
@@ -116,15 +115,16 @@
"ListTransferLogsResponse",
"ListTransferRunsRequest",
"ListTransferRunsResponse",
- "ScheduleOptions",
"ScheduleTransferRunsRequest",
"ScheduleTransferRunsResponse",
"StartManualTransferRunsRequest",
"StartManualTransferRunsResponse",
+ "UpdateTransferConfigRequest",
+ "EmailPreferences",
+ "ScheduleOptions",
"TransferConfig",
"TransferMessage",
"TransferRun",
"TransferState",
"TransferType",
- "UpdateTransferConfigRequest",
)
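The reshuffled `__all__` keeps both client classes importable from the top-level package; a quick smoke test:

```python
# Both clients remain importable from the top-level package after the
# reordering above.
from google.cloud import bigquery_datatransfer

assert bigquery_datatransfer.DataTransferServiceClient
assert bigquery_datatransfer.DataTransferServiceAsyncClient
```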
diff --git a/google/cloud/bigquery_datatransfer_v1/__init__.py b/google/cloud/bigquery_datatransfer_v1/__init__.py
index 258e1f36..1c6f0fc8 100644
--- a/google/cloud/bigquery_datatransfer_v1/__init__.py
+++ b/google/cloud/bigquery_datatransfer_v1/__init__.py
@@ -1,5 +1,4 @@
# -*- coding: utf-8 -*-
-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -17,6 +16,7 @@
from .services.data_transfer_service import DataTransferServiceClient
from .services.data_transfer_service import DataTransferServiceAsyncClient
+
from .types.datatransfer import CheckValidCredsRequest
from .types.datatransfer import CheckValidCredsResponse
from .types.datatransfer import CreateTransferConfigRequest
@@ -48,13 +48,14 @@
from .types.transfer import TransferState
from .types.transfer import TransferType
-
__all__ = (
+ "DataTransferServiceAsyncClient",
"CheckValidCredsRequest",
"CheckValidCredsResponse",
"CreateTransferConfigRequest",
"DataSource",
"DataSourceParameter",
+ "DataTransferServiceClient",
"DeleteTransferConfigRequest",
"DeleteTransferRunRequest",
"EmailPreferences",
@@ -80,6 +81,4 @@
"TransferState",
"TransferType",
"UpdateTransferConfigRequest",
- "DataTransferServiceClient",
- "DataTransferServiceAsyncClient",
)
diff --git a/google/cloud/bigquery_datatransfer_v1/gapic_metadata.json b/google/cloud/bigquery_datatransfer_v1/gapic_metadata.json
new file mode 100644
index 00000000..75ee9340
--- /dev/null
+++ b/google/cloud/bigquery_datatransfer_v1/gapic_metadata.json
@@ -0,0 +1,163 @@
+ {
+ "comment": "This file maps proto services/RPCs to the corresponding library clients/methods",
+ "language": "python",
+ "libraryPackage": "google.cloud.bigquery_datatransfer_v1",
+ "protoPackage": "google.cloud.bigquery.datatransfer.v1",
+ "schema": "1.0",
+ "services": {
+ "DataTransferService": {
+ "clients": {
+ "grpc": {
+ "libraryClient": "DataTransferServiceClient",
+ "rpcs": {
+ "CheckValidCreds": {
+ "methods": [
+ "check_valid_creds"
+ ]
+ },
+ "CreateTransferConfig": {
+ "methods": [
+ "create_transfer_config"
+ ]
+ },
+ "DeleteTransferConfig": {
+ "methods": [
+ "delete_transfer_config"
+ ]
+ },
+ "DeleteTransferRun": {
+ "methods": [
+ "delete_transfer_run"
+ ]
+ },
+ "GetDataSource": {
+ "methods": [
+ "get_data_source"
+ ]
+ },
+ "GetTransferConfig": {
+ "methods": [
+ "get_transfer_config"
+ ]
+ },
+ "GetTransferRun": {
+ "methods": [
+ "get_transfer_run"
+ ]
+ },
+ "ListDataSources": {
+ "methods": [
+ "list_data_sources"
+ ]
+ },
+ "ListTransferConfigs": {
+ "methods": [
+ "list_transfer_configs"
+ ]
+ },
+ "ListTransferLogs": {
+ "methods": [
+ "list_transfer_logs"
+ ]
+ },
+ "ListTransferRuns": {
+ "methods": [
+ "list_transfer_runs"
+ ]
+ },
+ "ScheduleTransferRuns": {
+ "methods": [
+ "schedule_transfer_runs"
+ ]
+ },
+ "StartManualTransferRuns": {
+ "methods": [
+ "start_manual_transfer_runs"
+ ]
+ },
+ "UpdateTransferConfig": {
+ "methods": [
+ "update_transfer_config"
+ ]
+ }
+ }
+ },
+ "grpc-async": {
+ "libraryClient": "DataTransferServiceAsyncClient",
+ "rpcs": {
+ "CheckValidCreds": {
+ "methods": [
+ "check_valid_creds"
+ ]
+ },
+ "CreateTransferConfig": {
+ "methods": [
+ "create_transfer_config"
+ ]
+ },
+ "DeleteTransferConfig": {
+ "methods": [
+ "delete_transfer_config"
+ ]
+ },
+ "DeleteTransferRun": {
+ "methods": [
+ "delete_transfer_run"
+ ]
+ },
+ "GetDataSource": {
+ "methods": [
+ "get_data_source"
+ ]
+ },
+ "GetTransferConfig": {
+ "methods": [
+ "get_transfer_config"
+ ]
+ },
+ "GetTransferRun": {
+ "methods": [
+ "get_transfer_run"
+ ]
+ },
+ "ListDataSources": {
+ "methods": [
+ "list_data_sources"
+ ]
+ },
+ "ListTransferConfigs": {
+ "methods": [
+ "list_transfer_configs"
+ ]
+ },
+ "ListTransferLogs": {
+ "methods": [
+ "list_transfer_logs"
+ ]
+ },
+ "ListTransferRuns": {
+ "methods": [
+ "list_transfer_runs"
+ ]
+ },
+ "ScheduleTransferRuns": {
+ "methods": [
+ "schedule_transfer_runs"
+ ]
+ },
+ "StartManualTransferRuns": {
+ "methods": [
+ "start_manual_transfer_runs"
+ ]
+ },
+ "UpdateTransferConfig": {
+ "methods": [
+ "update_transfer_config"
+ ]
+ }
+ }
+ }
+ }
+ }
+ }
+}
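The metadata maps each proto RPC to its snake_case client method, which tooling can use to locate the generated surface. An illustrative lookup against the file added above:

```python
# Illustrative: resolve an RPC name from gapic_metadata.json to the
# corresponding client method, e.g. ListDataSources -> list_data_sources.
import json

from google.cloud import bigquery_datatransfer_v1

path = "google/cloud/bigquery_datatransfer_v1/gapic_metadata.json"
with open(path) as fh:
    meta = json.load(fh)

rpcs = meta["services"]["DataTransferService"]["clients"]["grpc"]["rpcs"]
method = rpcs["ListDataSources"]["methods"][0]  # "list_data_sources"
assert hasattr(bigquery_datatransfer_v1.DataTransferServiceClient, method)
```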
diff --git a/google/cloud/bigquery_datatransfer_v1/proto/datatransfer.proto b/google/cloud/bigquery_datatransfer_v1/proto/datatransfer.proto
deleted file mode 100644
index a401c15c..00000000
--- a/google/cloud/bigquery_datatransfer_v1/proto/datatransfer.proto
+++ /dev/null
@@ -1,843 +0,0 @@
-// Copyright 2020 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-syntax = "proto3";
-
-package google.cloud.bigquery.datatransfer.v1;
-
-import "google/api/annotations.proto";
-import "google/api/client.proto";
-import "google/api/field_behavior.proto";
-import "google/api/resource.proto";
-import "google/cloud/bigquery/datatransfer/v1/transfer.proto";
-import "google/protobuf/duration.proto";
-import "google/protobuf/empty.proto";
-import "google/protobuf/field_mask.proto";
-import "google/protobuf/timestamp.proto";
-import "google/protobuf/wrappers.proto";
-
-option csharp_namespace = "Google.Cloud.BigQuery.DataTransfer.V1";
-option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/datatransfer/v1;datatransfer";
-option java_multiple_files = true;
-option java_outer_classname = "DataTransferProto";
-option java_package = "com.google.cloud.bigquery.datatransfer.v1";
-option php_namespace = "Google\\Cloud\\BigQuery\\DataTransfer\\V1";
-option ruby_package = "Google::Cloud::Bigquery::DataTransfer::V1";
-
-// The Google BigQuery Data Transfer Service API enables BigQuery users to
-// configure the transfer of their data from other Google Products into
-// BigQuery. This service contains methods that are exposed to end users. It
-// backs the frontend.
-service DataTransferService {
- option (google.api.default_host) = "bigquerydatatransfer.googleapis.com";
- option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform";
-
- // Retrieves a supported data source and returns its settings,
- // which can be used for UI rendering.
- rpc GetDataSource(GetDataSourceRequest) returns (DataSource) {
- option (google.api.http) = {
- get: "/v1/{name=projects/*/locations/*/dataSources/*}"
- additional_bindings {
- get: "/v1/{name=projects/*/dataSources/*}"
- }
- };
- option (google.api.method_signature) = "name";
- }
-
- // Lists supported data sources and returns their settings,
- // which can be used for UI rendering.
- rpc ListDataSources(ListDataSourcesRequest) returns (ListDataSourcesResponse) {
- option (google.api.http) = {
- get: "/v1/{parent=projects/*/locations/*}/dataSources"
- additional_bindings {
- get: "/v1/{parent=projects/*}/dataSources"
- }
- };
- option (google.api.method_signature) = "parent";
- }
-
- // Creates a new data transfer configuration.
- rpc CreateTransferConfig(CreateTransferConfigRequest) returns (TransferConfig) {
- option (google.api.http) = {
- post: "/v1/{parent=projects/*/locations/*}/transferConfigs"
- body: "transfer_config"
- additional_bindings {
- post: "/v1/{parent=projects/*}/transferConfigs"
- body: "transfer_config"
- }
- };
- option (google.api.method_signature) = "parent,transfer_config";
- }
-
- // Updates a data transfer configuration.
- // All fields must be set, even if they are not updated.
- rpc UpdateTransferConfig(UpdateTransferConfigRequest) returns (TransferConfig) {
- option (google.api.http) = {
- patch: "/v1/{transfer_config.name=projects/*/locations/*/transferConfigs/*}"
- body: "transfer_config"
- additional_bindings {
- patch: "/v1/{transfer_config.name=projects/*/transferConfigs/*}"
- body: "transfer_config"
- }
- };
- option (google.api.method_signature) = "transfer_config,update_mask";
- }
-
- // Deletes a data transfer configuration,
- // including any associated transfer runs and logs.
- rpc DeleteTransferConfig(DeleteTransferConfigRequest) returns (google.protobuf.Empty) {
- option (google.api.http) = {
- delete: "/v1/{name=projects/*/locations/*/transferConfigs/*}"
- additional_bindings {
- delete: "/v1/{name=projects/*/transferConfigs/*}"
- }
- };
- option (google.api.method_signature) = "name";
- }
-
- // Returns information about a data transfer config.
- rpc GetTransferConfig(GetTransferConfigRequest) returns (TransferConfig) {
- option (google.api.http) = {
- get: "/v1/{name=projects/*/locations/*/transferConfigs/*}"
- additional_bindings {
- get: "/v1/{name=projects/*/transferConfigs/*}"
- }
- };
- option (google.api.method_signature) = "name";
- }
-
- // Returns information about all data transfers in the project.
- rpc ListTransferConfigs(ListTransferConfigsRequest) returns (ListTransferConfigsResponse) {
- option (google.api.http) = {
- get: "/v1/{parent=projects/*/locations/*}/transferConfigs"
- additional_bindings {
- get: "/v1/{parent=projects/*}/transferConfigs"
- }
- };
- option (google.api.method_signature) = "parent";
- }
-
- // Creates transfer runs for a time range [start_time, end_time].
- // For each date - or whatever granularity the data source supports - in the
- // range, one transfer run is created.
- // Note that runs are created per UTC time in the time range.
- // DEPRECATED: use StartManualTransferRuns instead.
- rpc ScheduleTransferRuns(ScheduleTransferRunsRequest) returns (ScheduleTransferRunsResponse) {
- option deprecated = true;
- option (google.api.http) = {
- post: "/v1/{parent=projects/*/locations/*/transferConfigs/*}:scheduleRuns"
- body: "*"
- additional_bindings {
- post: "/v1/{parent=projects/*/transferConfigs/*}:scheduleRuns"
- body: "*"
- }
- };
- option (google.api.method_signature) = "parent,start_time,end_time";
- }
-
- // Start manual transfer runs to be executed now with schedule_time equal to
- // current time. The transfer runs can be created for a time range where the
- // run_time is between start_time (inclusive) and end_time (exclusive), or for
- // a specific run_time.
- rpc StartManualTransferRuns(StartManualTransferRunsRequest) returns (StartManualTransferRunsResponse) {
- option (google.api.http) = {
- post: "/v1/{parent=projects/*/locations/*/transferConfigs/*}:startManualRuns"
- body: "*"
- additional_bindings {
- post: "/v1/{parent=projects/*/transferConfigs/*}:startManualRuns"
- body: "*"
- }
- };
- }
-
- // Returns information about the particular transfer run.
- rpc GetTransferRun(GetTransferRunRequest) returns (TransferRun) {
- option (google.api.http) = {
- get: "/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}"
- additional_bindings {
- get: "/v1/{name=projects/*/transferConfigs/*/runs/*}"
- }
- };
- option (google.api.method_signature) = "name";
- }
-
- // Deletes the specified transfer run.
- rpc DeleteTransferRun(DeleteTransferRunRequest) returns (google.protobuf.Empty) {
- option (google.api.http) = {
- delete: "/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}"
- additional_bindings {
- delete: "/v1/{name=projects/*/transferConfigs/*/runs/*}"
- }
- };
- option (google.api.method_signature) = "name";
- }
-
- // Returns information about running and completed jobs.
- rpc ListTransferRuns(ListTransferRunsRequest) returns (ListTransferRunsResponse) {
- option (google.api.http) = {
- get: "/v1/{parent=projects/*/locations/*/transferConfigs/*}/runs"
- additional_bindings {
- get: "/v1/{parent=projects/*/transferConfigs/*}/runs"
- }
- };
- option (google.api.method_signature) = "parent";
- }
-
- // Returns user facing log messages for the data transfer run.
- rpc ListTransferLogs(ListTransferLogsRequest) returns (ListTransferLogsResponse) {
- option (google.api.http) = {
- get: "/v1/{parent=projects/*/locations/*/transferConfigs/*/runs/*}/transferLogs"
- additional_bindings {
- get: "/v1/{parent=projects/*/transferConfigs/*/runs/*}/transferLogs"
- }
- };
- option (google.api.method_signature) = "parent";
- }
-
- // Returns true if valid credentials exist for the given data source and
- // requesting user.
- // Some data sources don't support service accounts, so we need to talk to
- // them on behalf of the end user. This API just checks whether we have an
- // OAuth token for the particular user, which is a prerequisite before the
- // user can create a transfer config.
- rpc CheckValidCreds(CheckValidCredsRequest) returns (CheckValidCredsResponse) {
- option (google.api.http) = {
- post: "/v1/{name=projects/*/locations/*/dataSources/*}:checkValidCreds"
- body: "*"
- additional_bindings {
- post: "/v1/{name=projects/*/dataSources/*}:checkValidCreds"
- body: "*"
- }
- };
- option (google.api.method_signature) = "name";
- }
-}
-
-// Represents a data source parameter with validation rules, so that
-// parameters can be rendered in the UI. These parameters are given to us by
-// supported data sources, and include all needed information for rendering
-// and validation.
-// Thus, whoever uses this api can decide to generate either generic ui,
-// or custom data source specific forms.
-message DataSourceParameter {
- // Parameter type.
- enum Type {
- // Type unspecified.
- TYPE_UNSPECIFIED = 0;
-
- // String parameter.
- STRING = 1;
-
- // Integer parameter (64-bits).
- // Will be serialized to json as string.
- INTEGER = 2;
-
- // Double precision floating point parameter.
- DOUBLE = 3;
-
- // Boolean parameter.
- BOOLEAN = 4;
-
- // Deprecated. This field has no effect.
- RECORD = 5;
-
- // Page ID for a Google+ Page.
- PLUS_PAGE = 6;
- }
-
- // Parameter identifier.
- string param_id = 1;
-
- // Parameter display name in the user interface.
- string display_name = 2;
-
- // Parameter description.
- string description = 3;
-
- // Parameter type.
- Type type = 4;
-
- // Is parameter required.
- bool required = 5;
-
- // Deprecated. This field has no effect.
- bool repeated = 6;
-
- // Regular expression which can be used for parameter validation.
- string validation_regex = 7;
-
- // All possible values for the parameter.
- repeated string allowed_values = 8;
-
- // For integer and double values specifies minimum allowed value.
- google.protobuf.DoubleValue min_value = 9;
-
- // For integer and double values specifies maximum allowed value.
- google.protobuf.DoubleValue max_value = 10;
-
- // Deprecated. This field has no effect.
- repeated DataSourceParameter fields = 11;
-
- // Description of the requirements for this field, in case the user input does
- // not fulfill the regex pattern or min/max values.
- string validation_description = 12;
-
- // URL to a help document to further explain the naming requirements.
- string validation_help_url = 13;
-
- // Cannot be changed after initial creation.
- bool immutable = 14;
-
- // Deprecated. This field has no effect.
- bool recurse = 15;
-
- // If true, it should not be used in new transfers, and it should not be
- // visible to users.
- bool deprecated = 20;
-}
-
-// Represents data source metadata. Metadata is sufficient to
-// render UI and request proper OAuth tokens.
-message DataSource {
- option (google.api.resource) = {
- type: "bigquerydatatransfer.googleapis.com/DataSource"
- pattern: "projects/{project}/dataSources/{data_source}"
- pattern: "projects/{project}/locations/{location}/dataSources/{data_source}"
- };
-
- // The type of authorization needed for this data source.
- enum AuthorizationType {
- // Type unspecified.
- AUTHORIZATION_TYPE_UNSPECIFIED = 0;
-
- // Use OAuth 2 authorization codes that can be exchanged
- // for a refresh token on the backend.
- AUTHORIZATION_CODE = 1;
-
- // Return an authorization code for a given Google+ page that can then be
- // exchanged for a refresh token on the backend.
- GOOGLE_PLUS_AUTHORIZATION_CODE = 2;
-
- // Use First Party Client OAuth. First Party Client OAuth doesn't require a
- // refresh token to get an offline access token. Instead, it uses a
- // client-signed JWT assertion to retrieve an access token.
- FIRST_PARTY_OAUTH = 3;
- }
-
- // Represents how the data source supports data auto refresh.
- enum DataRefreshType {
- // The data source won't support data auto refresh, which is the default value.
- DATA_REFRESH_TYPE_UNSPECIFIED = 0;
-
- // The data source supports data auto refresh, and runs will be scheduled
- // for the past few days. Does not allow custom values to be set for each
- // transfer config.
- SLIDING_WINDOW = 1;
-
- // The data source supports data auto refresh, and runs will be scheduled
- // for the past few days. Allows custom values to be set for each transfer
- // config.
- CUSTOM_SLIDING_WINDOW = 2;
- }
-
- // Output only. Data source resource name.
- string name = 1 [(google.api.field_behavior) = OUTPUT_ONLY];
-
- // Data source id.
- string data_source_id = 2;
-
- // User friendly data source name.
- string display_name = 3;
-
- // User friendly data source description string.
- string description = 4;
-
- // Data source client id which should be used to receive refresh token.
- string client_id = 5;
-
- // Api auth scopes for which refresh token needs to be obtained. These are
- // scopes needed by a data source to prepare data and ingest them into
- // BigQuery, e.g., https://www.googleapis.com/auth/bigquery
- repeated string scopes = 6;
-
- // Deprecated. This field has no effect.
- TransferType transfer_type = 7 [deprecated = true];
-
- // Deprecated. This field has no effect.
- bool supports_multiple_transfers = 8 [deprecated = true];
-
- // The number of seconds to wait for an update from the data source
- // before the Data Transfer Service marks the transfer as FAILED.
- int32 update_deadline_seconds = 9;
-
- // Default data transfer schedule.
- // Examples of valid schedules include:
- // `1st,3rd monday of month 15:30`,
- // `every wed,fri of jan,jun 13:15`, and
- // `first sunday of quarter 00:00`.
- string default_schedule = 10;
-
- // Specifies whether the data source supports a user defined schedule, or
- // operates on the default schedule.
- // When set to `true`, user can override default schedule.
- bool supports_custom_schedule = 11;
-
- // Data source parameters.
- repeated DataSourceParameter parameters = 12;
-
- // Url for the help document for this data source.
- string help_url = 13;
-
- // Indicates the type of authorization.
- AuthorizationType authorization_type = 14;
-
- // Specifies whether the data source supports automatic data refresh for the
- // past few days, and how it's supported.
- // For some data sources, data might not be complete until a few days later,
- // so it's useful to refresh data automatically.
- DataRefreshType data_refresh_type = 15;
-
- // Default data refresh window on days.
- // Only meaningful when `data_refresh_type` = `SLIDING_WINDOW`.
- int32 default_data_refresh_window_days = 16;
-
- // Disables backfilling and manual run scheduling
- // for the data source.
- bool manual_runs_disabled = 17;
-
- // The minimum interval for scheduler to schedule runs.
- google.protobuf.Duration minimum_schedule_interval = 18;
-}
-
-// A request to get data source info.
-message GetDataSourceRequest {
- // Required. The field will contain the name of the resource requested, for example:
- // `projects/{project_id}/dataSources/{data_source_id}` or
- // `projects/{project_id}/locations/{location_id}/dataSources/{data_source_id}`
- string name = 1 [
- (google.api.field_behavior) = REQUIRED,
- (google.api.resource_reference) = {
- type: "bigquerydatatransfer.googleapis.com/DataSource"
- }
- ];
-}
-
-// Request to list supported data sources and their data transfer settings.
-message ListDataSourcesRequest {
- // Required. The BigQuery project id for which data sources should be returned.
- // Must be in the form: `projects/{project_id}` or
- // `projects/{project_id}/locations/{location_id}`
- string parent = 1 [
- (google.api.field_behavior) = REQUIRED,
- (google.api.resource_reference) = {
- child_type: "bigquerydatatransfer.googleapis.com/DataSource"
- }
- ];
-
- // Pagination token, which can be used to request a specific page
- // of `ListDataSourcesRequest` list results. For multiple-page
- // results, `ListDataSourcesResponse` outputs
- // a `next_page` token, which can be used as the
- // `page_token` value to request the next page of list results.
- string page_token = 3;
-
- // Page size. The default page size is the maximum value of 1000 results.
- int32 page_size = 4;
-}
-
-// Returns list of supported data sources and their metadata.
-message ListDataSourcesResponse {
- // List of supported data sources and their transfer settings.
- repeated DataSource data_sources = 1;
-
- // Output only. The next-pagination token. For multiple-page list results,
- // this token can be used as the
- // `ListDataSourcesRequest.page_token`
- // to request the next page of list results.
- string next_page_token = 2 [(google.api.field_behavior) = OUTPUT_ONLY];
-}
-
-// A request to create a data transfer configuration. If new credentials are
-// needed for this transfer configuration, an authorization code must be
-// provided. If an authorization code is provided, the transfer configuration
-// will be associated with the user id corresponding to the
-// authorization code. Otherwise, the transfer configuration will be associated
-// with the calling user.
-message CreateTransferConfigRequest {
- // Required. The BigQuery project id where the transfer configuration should be created.
- // Must be in the format projects/{project_id}/locations/{location_id} or
- // projects/{project_id}. If specified location and location of the
- // destination bigquery dataset do not match - the request will fail.
- string parent = 1 [
- (google.api.field_behavior) = REQUIRED,
- (google.api.resource_reference) = {
- child_type: "bigquerydatatransfer.googleapis.com/TransferConfig"
- }
- ];
-
- // Required. Data transfer configuration to create.
- TransferConfig transfer_config = 2 [(google.api.field_behavior) = REQUIRED];
-
- // Optional OAuth2 authorization code to use with this transfer configuration.
- // This is required if new credentials are needed, as indicated by
- // `CheckValidCreds`.
- // In order to obtain authorization_code, please make a
- // request to
- // https://www.gstatic.com/bigquerydatatransfer/oauthz/auth?client_id=<datatransfer_api_client_id>&scope=<data_source_scopes>&redirect_uri=<redirect_uri>
- //
- // * client_id should be OAuth client_id of BigQuery DTS API for the given
- // data source returned by ListDataSources method.
- // * data_source_scopes are the scopes returned by ListDataSources method.
- // * redirect_uri is an optional parameter. If not specified, then
- // authorization code is posted to the opener of authorization flow window.
- // Otherwise it will be sent to the redirect uri. A special value of
- // urn:ietf:wg:oauth:2.0:oob means that authorization code should be
- // returned in the title bar of the browser, with the page text prompting
- // the user to copy the code and paste it in the application.
- string authorization_code = 3;
-
- // Optional version info. If users want to find a very recent access token,
- // that is, immediately after approving access, users have to set the
- // version_info claim in the token request. To obtain the version_info, users
- // must use the "none+gsession" response type. which be return a
- // version_info back in the authorization response which be be put in a JWT
- // claim in the token request.
- string version_info = 5;
-
- // Optional service account name. If this field is set, transfer config will
- // be created with this service account credentials. It requires that
- // requesting user calling this API has permissions to act as this service
- // account.
- string service_account_name = 6;
-}
-
-// A request to update a transfer configuration. To update the user id of the
-// transfer configuration, an authorization code needs to be provided.
-message UpdateTransferConfigRequest {
- // Required. Data transfer configuration to create.
- TransferConfig transfer_config = 1 [(google.api.field_behavior) = REQUIRED];
-
- // Optional OAuth2 authorization code to use with this transfer configuration.
- // If it is provided, the transfer configuration will be associated with the
- // authorizing user.
- // In order to obtain authorization_code, please make a
- // request to
- // https://www.gstatic.com/bigquerydatatransfer/oauthz/auth?client_id=<datatransfer_api_client_id>&scope=<data_source_scopes>&redirect_uri=<redirect_uri>
- //
- // * client_id should be OAuth client_id of BigQuery DTS API for the given
- // data source returned by ListDataSources method.
- // * data_source_scopes are the scopes returned by ListDataSources method.
- // * redirect_uri is an optional parameter. If not specified, then
- // authorization code is posted to the opener of authorization flow window.
- // Otherwise it will be sent to the redirect uri. A special value of
- // urn:ietf:wg:oauth:2.0:oob means that authorization code should be
- // returned in the title bar of the browser, with the page text prompting
- // the user to copy the code and paste it in the application.
- string authorization_code = 3;
-
- // Required. List of fields to be updated in this request.
- google.protobuf.FieldMask update_mask = 4 [(google.api.field_behavior) = REQUIRED];
-
- // Optional version info. If users want to find a very recent access token,
- // that is, immediately after approving access, users have to set the
- // version_info claim in the token request. To obtain the version_info, users
- // must use the "none+gsession" response type. which be return a
- // version_info back in the authorization response which be be put in a JWT
- // claim in the token request.
- string version_info = 5;
-
- // Optional service account name. If this field is set and
- // "service_account_name" is set in update_mask, transfer config will be
- // updated to use this service account credentials. It requires that
- // requesting user calling this API has permissions to act as this service
- // account.
- string service_account_name = 6;
-}
-
-// A request to get data transfer information.
-message GetTransferConfigRequest {
- // Required. The field will contain the name of the resource requested, for example:
- // `projects/{project_id}/transferConfigs/{config_id}` or
- // `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`
- string name = 1 [
- (google.api.field_behavior) = REQUIRED,
- (google.api.resource_reference) = {
- type: "bigquerydatatransfer.googleapis.com/TransferConfig"
- }
- ];
-}
-
-// A request to delete data transfer information. All associated transfer runs
-// and log messages will be deleted as well.
-message DeleteTransferConfigRequest {
- // Required. The field will contain the name of the resource requested, for example:
- // `projects/{project_id}/transferConfigs/{config_id}` or
- // `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`
- string name = 1 [
- (google.api.field_behavior) = REQUIRED,
- (google.api.resource_reference) = {
- type: "bigquerydatatransfer.googleapis.com/TransferConfig"
- }
- ];
-}
-
-// A request to get data transfer run information.
-message GetTransferRunRequest {
- // Required. The field will contain the name of the resource requested, for example:
- // `projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}` or
- // `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}`
- string name = 1 [
- (google.api.field_behavior) = REQUIRED,
- (google.api.resource_reference) = {
- type: "bigquerydatatransfer.googleapis.com/Run"
- }
- ];
-}
-
-// A request to delete data transfer run information.
-message DeleteTransferRunRequest {
- // Required. The field will contain the name of the resource requested, for example:
- // `projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}` or
- // `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}`
- string name = 1 [
- (google.api.field_behavior) = REQUIRED,
- (google.api.resource_reference) = {
- type: "bigquerydatatransfer.googleapis.com/Run"
- }
- ];
-}
-
-// A request to list data transfers configured for a BigQuery project.
-message ListTransferConfigsRequest {
- // Required. The BigQuery project id for which data sources
- // should be returned: `projects/{project_id}` or
- // `projects/{project_id}/locations/{location_id}`
- string parent = 1 [
- (google.api.field_behavior) = REQUIRED,
- (google.api.resource_reference) = {
- child_type: "bigquerydatatransfer.googleapis.com/TransferConfig"
- }
- ];
-
- // When specified, only configurations of requested data sources are returned.
- repeated string data_source_ids = 2;
-
- // Pagination token, which can be used to request a specific page
- // of `ListTransfersRequest` list results. For multiple-page
- // results, `ListTransfersResponse` outputs
- // a `next_page` token, which can be used as the
- // `page_token` value to request the next page of list results.
- string page_token = 3;
-
- // Page size. The default page size is the maximum value of 1000 results.
- int32 page_size = 4;
-}
-
-// The returned list of pipelines in the project.
-message ListTransferConfigsResponse {
- // Output only. The stored pipeline transfer configurations.
- repeated TransferConfig transfer_configs = 1 [(google.api.field_behavior) = OUTPUT_ONLY];
-
- // Output only. The next-pagination token. For multiple-page list results,
- // this token can be used as the
- // `ListTransferConfigsRequest.page_token`
- // to request the next page of list results.
- string next_page_token = 2 [(google.api.field_behavior) = OUTPUT_ONLY];
-}
-
-// A request to list data transfer runs. UI can use this method to show/filter
-// specific data transfer runs. The data source can use this method to request
-// all scheduled transfer runs.
-message ListTransferRunsRequest {
- // Represents which runs should be pulled.
- enum RunAttempt {
- // All runs should be returned.
- RUN_ATTEMPT_UNSPECIFIED = 0;
-
- // Only latest run per day should be returned.
- LATEST = 1;
- }
-
- // Required. Name of transfer configuration for which transfer runs should be retrieved.
- // Format of transfer configuration resource name is:
- // `projects/{project_id}/transferConfigs/{config_id}` or
- // `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`.
- string parent = 1 [
- (google.api.field_behavior) = REQUIRED,
- (google.api.resource_reference) = {
- child_type: "bigquerydatatransfer.googleapis.com/Run"
- }
- ];
-
- // When specified, only transfer runs with requested states are returned.
- repeated TransferState states = 2;
-
- // Pagination token, which can be used to request a specific page
- // of `ListTransferRunsRequest` list results. For multiple-page
- // results, `ListTransferRunsResponse` outputs
- // a `next_page` token, which can be used as the
- // `page_token` value to request the next page of list results.
- string page_token = 3;
-
- // Page size. The default page size is the maximum value of 1000 results.
- int32 page_size = 4;
-
- // Indicates how run attempts are to be pulled.
- RunAttempt run_attempt = 5;
-}
-
-// The returned list of pipelines in the project.
-message ListTransferRunsResponse {
- // Output only. The stored pipeline transfer runs.
- repeated TransferRun transfer_runs = 1 [(google.api.field_behavior) = OUTPUT_ONLY];
-
- // Output only. The next-pagination token. For multiple-page list results,
- // this token can be used as the
- // `ListTransferRunsRequest.page_token`
- // to request the next page of list results.
- string next_page_token = 2 [(google.api.field_behavior) = OUTPUT_ONLY];
-}
-
-// A request to get user facing log messages associated with data transfer run.
-message ListTransferLogsRequest {
- // Required. Transfer run name in the form:
- // `projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}` or
- // `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}`
- string parent = 1 [
- (google.api.field_behavior) = REQUIRED,
- (google.api.resource_reference) = {
- type: "bigquerydatatransfer.googleapis.com/Run"
- }
- ];
-
- // Pagination token, which can be used to request a specific page
- // of `ListTransferLogsRequest` list results. For multiple-page
- // results, `ListTransferLogsResponse` outputs
- // a `next_page` token, which can be used as the
- // `page_token` value to request the next page of list results.
- string page_token = 4;
-
- // Page size. The default page size is the maximum value of 1000 results.
- int32 page_size = 5;
-
- // Message types to return. If not populated - INFO, WARNING and ERROR
- // messages are returned.
- repeated TransferMessage.MessageSeverity message_types = 6;
-}
-
-// The returned list transfer run messages.
-message ListTransferLogsResponse {
- // Output only. The stored pipeline transfer messages.
- repeated TransferMessage transfer_messages = 1 [(google.api.field_behavior) = OUTPUT_ONLY];
-
- // Output only. The next-pagination token. For multiple-page list results,
- // this token can be used as the
- // `GetTransferRunLogRequest.page_token`
- // to request the next page of list results.
- string next_page_token = 2 [(google.api.field_behavior) = OUTPUT_ONLY];
-}
-
-// A request to determine whether the user has valid credentials. This method
-// is used to limit the number of OAuth popups in the user interface. The
-// user id is inferred from the API call context.
-// If the data source has the Google+ authorization type, this method
-// returns false, as it cannot be determined whether the credentials are
-// already valid merely based on the user id.
-message CheckValidCredsRequest {
- // Required. The data source in the form:
- // `projects/{project_id}/dataSources/{data_source_id}` or
- // `projects/{project_id}/locations/{location_id}/dataSources/{data_source_id}`.
- string name = 1 [
- (google.api.field_behavior) = REQUIRED,
- (google.api.resource_reference) = {
- type: "bigquerydatatransfer.googleapis.com/DataSource"
- }
- ];
-}
-
-// A response indicating whether the credentials exist and are valid.
-message CheckValidCredsResponse {
- // If set to `true`, the credentials exist and are valid.
- bool has_valid_creds = 1;
-}
-
-// A request to schedule transfer runs for a time range.
-message ScheduleTransferRunsRequest {
- // Required. Transfer configuration name in the form:
- // `projects/{project_id}/transferConfigs/{config_id}` or
- // `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`.
- string parent = 1 [
- (google.api.field_behavior) = REQUIRED,
- (google.api.resource_reference) = {
- type: "bigquerydatatransfer.googleapis.com/TransferConfig"
- }
- ];
-
- // Required. Start time of the range of transfer runs. For example,
- // `"2017-05-25T00:00:00+00:00"`.
- google.protobuf.Timestamp start_time = 2 [(google.api.field_behavior) = REQUIRED];
-
- // Required. End time of the range of transfer runs. For example,
- // `"2017-05-30T00:00:00+00:00"`.
- google.protobuf.Timestamp end_time = 3 [(google.api.field_behavior) = REQUIRED];
-}
-
-// A response to schedule transfer runs for a time range.
-message ScheduleTransferRunsResponse {
- // The transfer runs that were scheduled.
- repeated TransferRun runs = 1;
-}
-
-// A request to start manual transfer runs.
-message StartManualTransferRunsRequest {
- // A specification for a time range; this will request transfer runs with
- // run_time between start_time (inclusive) and end_time (exclusive).
- message TimeRange {
- // Start time of the range of transfer runs. For example,
- // `"2017-05-25T00:00:00+00:00"`. The start_time must be strictly less than
- // the end_time. Creates transfer runs where run_time is in the range between
- // start_time (inclusive) and end_time (exclusive).
- google.protobuf.Timestamp start_time = 1;
-
- // End time of the range of transfer runs. For example,
- // `"2017-05-30T00:00:00+00:00"`. The end_time must not be in the future.
- // Creates transfer runs where run_time is in the range between start_time
- // (inclusive) and end_time (exclusive).
- google.protobuf.Timestamp end_time = 2;
- }
-
- // Transfer configuration name in the form:
- // `projects/{project_id}/transferConfigs/{config_id}` or
- // `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`.
- string parent = 1 [(google.api.resource_reference) = {
- type: "bigquerydatatransfer.googleapis.com/TransferConfig"
- }];
-
- // The requested time specification - this can be a time range or a specific
- // run_time.
- oneof time {
- // Time range for the transfer runs that should be started.
- TimeRange requested_time_range = 3;
-
- // Specific run_time for a transfer run to be started. The
- // requested_run_time must not be in the future.
- google.protobuf.Timestamp requested_run_time = 4;
- }
-}
-
-// A response to start manual transfer runs.
-message StartManualTransferRunsResponse {
- // The transfer runs that were created.
- repeated TransferRun runs = 1;
-}
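Deleting the vendored proto does not shrink the API surface; the same RPCs stay reachable through the generated client. A sketch of `StartManualTransferRuns` from Python (the parent resource name is a placeholder):

```python
# Sketch: invoking StartManualTransferRuns through the generated client.
from google.cloud import bigquery_datatransfer_v1
from google.protobuf import timestamp_pb2

client = bigquery_datatransfer_v1.DataTransferServiceClient()

run_time = timestamp_pb2.Timestamp()
run_time.GetCurrentTime()  # requested_run_time must not be in the future

response = client.start_manual_transfer_runs(
    request={
        "parent": "projects/my-project/transferConfigs/my-config",
        "requested_run_time": run_time,
    }
)
print([run.name for run in response.runs])
```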
diff --git a/google/cloud/bigquery_datatransfer_v1/proto/transfer.proto b/google/cloud/bigquery_datatransfer_v1/proto/transfer.proto
deleted file mode 100644
index 5985f096..00000000
--- a/google/cloud/bigquery_datatransfer_v1/proto/transfer.proto
+++ /dev/null
@@ -1,285 +0,0 @@
-// Copyright 2020 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-syntax = "proto3";
-
-package google.cloud.bigquery.datatransfer.v1;
-
-import "google/api/field_behavior.proto";
-import "google/api/resource.proto";
-import "google/protobuf/struct.proto";
-import "google/protobuf/timestamp.proto";
-import "google/rpc/status.proto";
-
-option csharp_namespace = "Google.Cloud.BigQuery.DataTransfer.V1";
-option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/datatransfer/v1;datatransfer";
-option java_multiple_files = true;
-option java_outer_classname = "TransferProto";
-option java_package = "com.google.cloud.bigquery.datatransfer.v1";
-option objc_class_prefix = "GCBDT";
-option php_namespace = "Google\\Cloud\\BigQuery\\DataTransfer\\V1";
-option ruby_package = "Google::Cloud::Bigquery::DataTransfer::V1";
-
-// DEPRECATED. Represents data transfer type.
-enum TransferType {
- option deprecated = true;
-
- // Invalid or Unknown transfer type placeholder.
- TRANSFER_TYPE_UNSPECIFIED = 0;
-
- // Batch data transfer.
- BATCH = 1;
-
- // Streaming data transfer. Streaming data source currently doesn't
- // support multiple transfer configs per project.
- STREAMING = 2;
-}
-
-// Represents data transfer run state.
-enum TransferState {
- // State placeholder.
- TRANSFER_STATE_UNSPECIFIED = 0;
-
- // Data transfer is scheduled and is waiting to be picked up by
- // data transfer backend.
- PENDING = 2;
-
- // Data transfer is in progress.
- RUNNING = 3;
-
- // Data transfer completed successfully.
- SUCCEEDED = 4;
-
- // Data transfer failed.
- FAILED = 5;
-
- // Data transfer is cancelled.
- CANCELLED = 6;
-}
-
-// Represents preferences for sending email notifications for transfer run
-// events.
-message EmailPreferences {
- // If true, email notifications will be sent on transfer run failures.
- bool enable_failure_email = 1;
-}
-
-// Options customizing the data transfer schedule.
-message ScheduleOptions {
- // If true, automatic scheduling of data transfer runs for this configuration
- // will be disabled. The runs can be started on an ad-hoc basis using the
- // StartManualTransferRuns API. When automatic scheduling is disabled, the
- // TransferConfig.schedule field will be ignored.
- bool disable_auto_scheduling = 3;
-
- // Specifies time to start scheduling transfer runs. The first run will be
- // scheduled at or after the start time according to a recurrence pattern
- // defined in the schedule string. The start time can be changed at any
- // moment. The time when a data transfer can be triggered manually is not
- // limited by this option.
- google.protobuf.Timestamp start_time = 1;
-
- // Defines time to stop scheduling transfer runs. A transfer run cannot be
- // scheduled at or after the end time. The end time can be changed at any
- // moment. The time when a data transfer can be triggered manually is not
- // limited by this option.
- google.protobuf.Timestamp end_time = 2;
-}
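
For illustration, the three `ScheduleOptions` fields map directly onto the generated proto-plus type. A sketch, assuming the message keeps this shape in the `types` module (timestamps are arbitrary examples):

```python
from google.cloud import bigquery_datatransfer_v1
from google.protobuf import timestamp_pb2

start = timestamp_pb2.Timestamp()
start.FromJsonString("2021-05-01T00:00:00Z")
end = timestamp_pb2.Timestamp()
end.FromJsonString("2021-12-31T00:00:00Z")

options = bigquery_datatransfer_v1.ScheduleOptions(
    disable_auto_scheduling=False,  # keep recurring runs enabled
    start_time=start,  # first run scheduled at or after this time
    end_time=end,      # no run scheduled at or after this time
)
```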
-
-// Represents a data transfer configuration. A transfer configuration
-// contains all metadata needed to perform a data transfer. For example,
-// `destination_dataset_id` specifies where data should be stored.
-// When a new transfer configuration is created, the specified
-// `destination_dataset_id` is created when needed and shared with the
-// appropriate data source service account.
-message TransferConfig {
- option (google.api.resource) = {
- type: "bigquerydatatransfer.googleapis.com/TransferConfig"
- pattern: "projects/{project}/transferConfigs/{transfer_config}"
- pattern: "projects/{project}/locations/{location}/transferConfigs/{transfer_config}"
- };
-
- // The resource name of the transfer config.
- // Transfer config names have the form of
- // `projects/{project_id}/locations/{region}/transferConfigs/{config_id}`.
- // The name is automatically generated based on the config_id specified in
- // CreateTransferConfigRequest along with project_id and region. If config_id
- // is not provided, a uuid is usually (though not guaranteed or required to
- // be) generated for it.
- string name = 1;
-
- // The destination of the transfer config.
- oneof destination {
- // The BigQuery target dataset id.
- string destination_dataset_id = 2;
- }
-
- // User specified display name for the data transfer.
- string display_name = 3;
-
- // Data source id. Cannot be changed once data transfer is created.
- string data_source_id = 5;
-
- // Data transfer specific parameters.
- google.protobuf.Struct params = 9;
-
- // Data transfer schedule.
- // If the data source does not support a custom schedule, this should be
- // empty. If it is empty, the default value for the data source will be
- // used.
- // The specified times are in UTC.
- // Examples of valid format:
- // `1st,3rd monday of month 15:30`,
- // `every wed,fri of jan,jun 13:15`, and
- // `first sunday of quarter 00:00`.
- // See more explanation about the format here:
- // https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format
- // NOTE: the granularity should be at least 8 hours; runs cannot be scheduled more frequently.
- string schedule = 7;
-
- // Options customizing the data transfer schedule.
- ScheduleOptions schedule_options = 24;
-
- // The number of days to look back to automatically refresh the data.
- // For example, if `data_refresh_window_days = 10`, then every day
- // BigQuery reingests data for [today-10, today-1], rather than ingesting data
- // for just [today-1].
- // Only valid if the data source supports the feature. Set the value to 0
- // to use the default value.
- int32 data_refresh_window_days = 12;
-
- // Whether this config is disabled. When set to true, no runs are scheduled
- // for a given transfer.
- bool disabled = 13;
-
- // Output only. Data transfer modification time. Ignored by server on input.
- google.protobuf.Timestamp update_time = 4 [(google.api.field_behavior) = OUTPUT_ONLY];
-
- // Output only. Next time when data transfer will run.
- google.protobuf.Timestamp next_run_time = 8 [(google.api.field_behavior) = OUTPUT_ONLY];
-
- // Output only. State of the most recently updated transfer run.
- TransferState state = 10 [(google.api.field_behavior) = OUTPUT_ONLY];
-
- // Deprecated. Unique ID of the user on whose behalf transfer is done.
- int64 user_id = 11;
-
- // Output only. Region in which the BigQuery dataset is located.
- string dataset_region = 14 [(google.api.field_behavior) = OUTPUT_ONLY];
-
- // Pub/Sub topic where notifications will be sent after transfer runs
- // associated with this transfer config finish.
- string notification_pubsub_topic = 15;
-
- // Email notifications will be sent according to these preferences
- // to the email address of the user who owns this transfer config.
- EmailPreferences email_preferences = 18;
-}
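
Putting the schedule-format rules above into practice: a minimal sketch that creates a config via the generated client. The data source id, query params, and parent are illustrative only, and the schedule respects the 8-hour minimum granularity:

```python
from google.cloud import bigquery_datatransfer_v1

client = bigquery_datatransfer_v1.DataTransferServiceClient()

transfer_config = bigquery_datatransfer_v1.TransferConfig(
    destination_dataset_id="my_dataset",      # hypothetical dataset
    display_name="Nightly load",
    data_source_id="scheduled_query",         # illustrative data source
    params={"query": "SELECT CURRENT_DATE() AS d"},
    schedule="every 24 hours",                # times are UTC
)

created = client.create_transfer_config(
    parent="projects/my-project/locations/us",  # hypothetical parent
    transfer_config=transfer_config,
)
print(created.name, created.next_run_time)
```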
-
-// Represents a data transfer run.
-message TransferRun {
- option (google.api.resource) = {
- type: "bigquerydatatransfer.googleapis.com/Run"
- pattern: "projects/{project}/transferConfigs/{transfer_config}/runs/{run}"
- pattern: "projects/{project}/locations/{location}/transferConfigs/{transfer_config}/runs/{run}"
- };
-
- // The resource name of the transfer run.
- // Transfer run names have the form
- // `projects/{project_id}/locations/{location}/transferConfigs/{config_id}/runs/{run_id}`.
- // The name is ignored when creating a transfer run.
- string name = 1;
-
- // Minimum time after which a transfer run can be started.
- google.protobuf.Timestamp schedule_time = 3;
-
- // For batch transfer runs, specifies the date and time that data should be
- // ingested.
- google.protobuf.Timestamp run_time = 10;
-
- // Status of the transfer run.
- google.rpc.Status error_status = 21;
-
- // Output only. Time when transfer run was started.
- // Parameter ignored by server for input requests.
- google.protobuf.Timestamp start_time = 4 [(google.api.field_behavior) = OUTPUT_ONLY];
-
- // Output only. Time when transfer run ended.
- // Parameter ignored by server for input requests.
- google.protobuf.Timestamp end_time = 5 [(google.api.field_behavior) = OUTPUT_ONLY];
-
- // Output only. Last time the data transfer run state was updated.
- google.protobuf.Timestamp update_time = 6 [(google.api.field_behavior) = OUTPUT_ONLY];
-
- // Output only. Data transfer specific parameters.
- google.protobuf.Struct params = 9 [(google.api.field_behavior) = OUTPUT_ONLY];
-
- // Data transfer destination.
- oneof destination {
- // Output only. The BigQuery target dataset id.
- string destination_dataset_id = 2 [(google.api.field_behavior) = OUTPUT_ONLY];
- }
-
- // Output only. Data source id.
- string data_source_id = 7 [(google.api.field_behavior) = OUTPUT_ONLY];
-
- // Data transfer run state. Ignored for input requests.
- TransferState state = 8;
-
- // Deprecated. Unique ID of the user on whose behalf transfer is done.
- int64 user_id = 11;
-
- // Output only. Describes the schedule of this transfer run if it was
- // created as part of a regular schedule. For batch transfer runs that are
- // scheduled manually, this is empty.
- // NOTE: the system might choose to delay the schedule depending on the
- // current load, so `schedule_time` doesn't always match this.
- string schedule = 12 [(google.api.field_behavior) = OUTPUT_ONLY];
-
- // Output only. Pub/Sub topic where a notification will be sent after this
- // transfer run finishes.
- string notification_pubsub_topic = 23 [(google.api.field_behavior) = OUTPUT_ONLY];
-
- // Output only. Email notifications will be sent according to these
- // preferences to the email address of the user who owns the transfer config
- // this run was derived from.
- EmailPreferences email_preferences = 25 [(google.api.field_behavior) = OUTPUT_ONLY];
-}
-
-// Represents a user-facing message for a particular data transfer run.
-message TransferMessage {
- // Represents data transfer user-facing message severity.
- enum MessageSeverity {
- // No severity specified.
- MESSAGE_SEVERITY_UNSPECIFIED = 0;
-
- // Informational message.
- INFO = 1;
-
- // Warning message.
- WARNING = 2;
-
- // Error message.
- ERROR = 3;
- }
-
- // Time when message was logged.
- google.protobuf.Timestamp message_time = 1;
-
- // Message severity.
- MessageSeverity severity = 2;
-
- // Message text.
- string message_text = 3;
-}
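
`TransferMessage` entries surface through `list_transfer_logs` on the client. A sketch that filters a run's log for errors; the run name is hypothetical:

```python
from google.cloud import bigquery_datatransfer_v1

client = bigquery_datatransfer_v1.DataTransferServiceClient()
run_name = "projects/my-project/locations/us/transferConfigs/my-config/runs/my-run"

# The pager yields TransferMessage entries for the given run.
for message in client.list_transfer_logs(parent=run_name):
    if message.severity == bigquery_datatransfer_v1.TransferMessage.MessageSeverity.ERROR:
        print(message.message_time, message.message_text)
```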
diff --git a/google/cloud/bigquery_datatransfer_v1/services/__init__.py b/google/cloud/bigquery_datatransfer_v1/services/__init__.py
index 42ffdf2b..4de65971 100644
--- a/google/cloud/bigquery_datatransfer_v1/services/__init__.py
+++ b/google/cloud/bigquery_datatransfer_v1/services/__init__.py
@@ -1,5 +1,4 @@
# -*- coding: utf-8 -*-
-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/__init__.py b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/__init__.py
index b64f150a..392ecf49 100644
--- a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/__init__.py
+++ b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/__init__.py
@@ -1,5 +1,4 @@
# -*- coding: utf-8 -*-
-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,7 +13,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-
from .client import DataTransferServiceClient
from .async_client import DataTransferServiceAsyncClient
diff --git a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/async_client.py b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/async_client.py
index 47ac9f6d..d0454623 100644
--- a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/async_client.py
+++ b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/async_client.py
@@ -1,5 +1,4 @@
# -*- coding: utf-8 -*-
-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,7 +13,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-
from collections import OrderedDict
import functools
import re
@@ -22,21 +20,20 @@
import pkg_resources
import google.api_core.client_options as ClientOptions # type: ignore
-from google.api_core import exceptions # type: ignore
+from google.api_core import exceptions as core_exceptions # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
-from google.auth import credentials # type: ignore
+from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore
from google.cloud.bigquery_datatransfer_v1.services.data_transfer_service import pagers
from google.cloud.bigquery_datatransfer_v1.types import datatransfer
from google.cloud.bigquery_datatransfer_v1.types import transfer
-from google.protobuf import duration_pb2 as duration # type: ignore
-from google.protobuf import field_mask_pb2 as field_mask # type: ignore
-from google.protobuf import struct_pb2 as struct # type: ignore
-from google.protobuf import timestamp_pb2 as timestamp # type: ignore
-from google.rpc import status_pb2 as status # type: ignore
-
+from google.protobuf import duration_pb2 # type: ignore
+from google.protobuf import field_mask_pb2 # type: ignore
+from google.protobuf import struct_pb2 # type: ignore
+from google.protobuf import timestamp_pb2 # type: ignore
+from google.rpc import status_pb2 # type: ignore
from .transports.base import DataTransferServiceTransport, DEFAULT_CLIENT_INFO
from .transports.grpc_asyncio import DataTransferServiceGrpcAsyncIOTransport
from .client import DataTransferServiceClient
@@ -64,31 +61,26 @@ class DataTransferServiceAsyncClient:
parse_transfer_config_path = staticmethod(
DataTransferServiceClient.parse_transfer_config_path
)
-
common_billing_account_path = staticmethod(
DataTransferServiceClient.common_billing_account_path
)
parse_common_billing_account_path = staticmethod(
DataTransferServiceClient.parse_common_billing_account_path
)
-
common_folder_path = staticmethod(DataTransferServiceClient.common_folder_path)
parse_common_folder_path = staticmethod(
DataTransferServiceClient.parse_common_folder_path
)
-
common_organization_path = staticmethod(
DataTransferServiceClient.common_organization_path
)
parse_common_organization_path = staticmethod(
DataTransferServiceClient.parse_common_organization_path
)
-
common_project_path = staticmethod(DataTransferServiceClient.common_project_path)
parse_common_project_path = staticmethod(
DataTransferServiceClient.parse_common_project_path
)
-
common_location_path = staticmethod(DataTransferServiceClient.common_location_path)
parse_common_location_path = staticmethod(
DataTransferServiceClient.parse_common_location_path
@@ -96,7 +88,8 @@ class DataTransferServiceAsyncClient:
@classmethod
def from_service_account_info(cls, info: dict, *args, **kwargs):
- """Creates an instance of this client using the provided credentials info.
+ """Creates an instance of this client using the provided credentials
+ info.
Args:
info (dict): The service account private key info.
@@ -111,7 +104,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs):
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
- file.
+ file.
Args:
filename (str): The path to the service account private key json
@@ -128,7 +121,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs):
@property
def transport(self) -> DataTransferServiceTransport:
- """Return the transport used by the client instance.
+ """Returns the transport used by the client instance.
Returns:
DataTransferServiceTransport: The transport used by the client instance.
@@ -143,12 +136,12 @@ def transport(self) -> DataTransferServiceTransport:
def __init__(
self,
*,
- credentials: credentials.Credentials = None,
+ credentials: ga_credentials.Credentials = None,
transport: Union[str, DataTransferServiceTransport] = "grpc_asyncio",
client_options: ClientOptions = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
- """Instantiate the data transfer service client.
+ """Instantiates the data transfer service client.
Args:
credentials (Optional[google.auth.credentials.Credentials]): The
@@ -180,7 +173,6 @@ def __init__(
google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
creation failed for any reason.
"""
-
self._client = DataTransferServiceClient(
credentials=credentials,
transport=transport,
@@ -213,7 +205,6 @@ async def get_data_source(
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -241,7 +232,6 @@ async def get_data_source(
# If we have keyword arguments corresponding to fields on the
# request, apply these.
-
if name is not None:
request.name = name
@@ -254,7 +244,8 @@ async def get_data_source(
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
- exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
+ core_exceptions.DeadlineExceeded,
+ core_exceptions.ServiceUnavailable,
),
deadline=20.0,
),
@@ -299,7 +290,6 @@ async def list_data_sources(
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -329,7 +319,6 @@ async def list_data_sources(
# If we have keyword arguments corresponding to fields on the
# request, apply these.
-
if parent is not None:
request.parent = parent
@@ -342,7 +331,8 @@ async def list_data_sources(
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
- exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
+ core_exceptions.DeadlineExceeded,
+ core_exceptions.ServiceUnavailable,
),
deadline=20.0,
),
@@ -408,7 +398,6 @@ async def create_transfer_config(
This corresponds to the ``transfer_config`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -441,7 +430,6 @@ async def create_transfer_config(
# If we have keyword arguments corresponding to fields on the
# request, apply these.
-
if parent is not None:
request.parent = parent
if transfer_config is not None:
@@ -472,7 +460,7 @@ async def update_transfer_config(
request: datatransfer.UpdateTransferConfigRequest = None,
*,
transfer_config: transfer.TransferConfig = None,
- update_mask: field_mask.FieldMask = None,
+ update_mask: field_mask_pb2.FieldMask = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
@@ -500,7 +488,6 @@ async def update_transfer_config(
This corresponds to the ``update_mask`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -533,7 +520,6 @@ async def update_transfer_config(
# If we have keyword arguments corresponding to fields on the
# request, apply these.
-
if transfer_config is not None:
request.transfer_config = transfer_config
if update_mask is not None:
@@ -587,7 +573,6 @@ async def delete_transfer_config(
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -608,7 +593,6 @@ async def delete_transfer_config(
# If we have keyword arguments corresponding to fields on the
# request, apply these.
-
if name is not None:
request.name = name
@@ -621,7 +605,8 @@ async def delete_transfer_config(
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
- exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
+ core_exceptions.DeadlineExceeded,
+ core_exceptions.ServiceUnavailable,
),
deadline=20.0,
),
@@ -664,7 +649,6 @@ async def get_transfer_config(
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -697,7 +681,6 @@ async def get_transfer_config(
# If we have keyword arguments corresponding to fields on the
# request, apply these.
-
if name is not None:
request.name = name
@@ -710,7 +693,8 @@ async def get_transfer_config(
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
- exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
+ core_exceptions.DeadlineExceeded,
+ core_exceptions.ServiceUnavailable,
),
deadline=20.0,
),
@@ -754,7 +738,6 @@ async def list_transfer_configs(
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -784,7 +767,6 @@ async def list_transfer_configs(
# If we have keyword arguments corresponding to fields on the
# request, apply these.
-
if parent is not None:
request.parent = parent
@@ -797,7 +779,8 @@ async def list_transfer_configs(
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
- exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
+ core_exceptions.DeadlineExceeded,
+ core_exceptions.ServiceUnavailable,
),
deadline=20.0,
),
@@ -828,8 +811,8 @@ async def schedule_transfer_runs(
request: datatransfer.ScheduleTransferRunsRequest = None,
*,
parent: str = None,
- start_time: timestamp.Timestamp = None,
- end_time: timestamp.Timestamp = None,
+ start_time: timestamp_pb2.Timestamp = None,
+ end_time: timestamp_pb2.Timestamp = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
@@ -866,7 +849,6 @@ async def schedule_transfer_runs(
This corresponds to the ``end_time`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -893,7 +875,6 @@ async def schedule_transfer_runs(
# If we have keyword arguments corresponding to fields on the
# request, apply these.
-
if parent is not None:
request.parent = parent
if start_time is not None:
@@ -938,7 +919,6 @@ async def start_manual_transfer_runs(
request (:class:`google.cloud.bigquery_datatransfer_v1.types.StartManualTransferRunsRequest`):
The request object. A request to start manual transfer
runs.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -952,7 +932,6 @@ async def start_manual_transfer_runs(
"""
# Create or coerce a protobuf request object.
-
request = datatransfer.StartManualTransferRunsRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
@@ -1001,7 +980,6 @@ async def get_transfer_run(
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -1026,7 +1004,6 @@ async def get_transfer_run(
# If we have keyword arguments corresponding to fields on the
# request, apply these.
-
if name is not None:
request.name = name
@@ -1039,7 +1016,8 @@ async def get_transfer_run(
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
- exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
+ core_exceptions.DeadlineExceeded,
+ core_exceptions.ServiceUnavailable,
),
deadline=20.0,
),
@@ -1084,7 +1062,6 @@ async def delete_transfer_run(
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -1105,7 +1082,6 @@ async def delete_transfer_run(
# If we have keyword arguments corresponding to fields on the
# request, apply these.
-
if name is not None:
request.name = name
@@ -1118,7 +1094,8 @@ async def delete_transfer_run(
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
- exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
+ core_exceptions.DeadlineExceeded,
+ core_exceptions.ServiceUnavailable,
),
deadline=20.0,
),
@@ -1164,7 +1141,6 @@ async def list_transfer_runs(
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -1194,7 +1170,6 @@ async def list_transfer_runs(
# If we have keyword arguments corresponding to fields on the
# request, apply these.
-
if parent is not None:
request.parent = parent
@@ -1207,7 +1182,8 @@ async def list_transfer_runs(
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
- exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
+ core_exceptions.DeadlineExceeded,
+ core_exceptions.ServiceUnavailable,
),
deadline=20.0,
),
@@ -1258,7 +1234,6 @@ async def list_transfer_logs(
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -1288,7 +1263,6 @@ async def list_transfer_logs(
# If we have keyword arguments corresponding to fields on the
# request, apply these.
-
if parent is not None:
request.parent = parent
@@ -1301,7 +1275,8 @@ async def list_transfer_logs(
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
- exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
+ core_exceptions.DeadlineExceeded,
+ core_exceptions.ServiceUnavailable,
),
deadline=20.0,
),
@@ -1363,7 +1338,6 @@ async def check_valid_creds(
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -1390,7 +1364,6 @@ async def check_valid_creds(
# If we have keyword arguments corresponding to fields on the
# request, apply these.
-
if name is not None:
request.name = name
@@ -1403,7 +1376,8 @@ async def check_valid_creds(
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
- exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
+ core_exceptions.DeadlineExceeded,
+ core_exceptions.ServiceUnavailable,
),
deadline=20.0,
),
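
The import changes above (e.g. `timestamp_pb2` instead of the `timestamp` alias) flow straight into the async client's flattened signatures. A sketch of a one-week backfill via `schedule_transfer_runs`, with hypothetical IDs:

```python
import asyncio
from google.cloud import bigquery_datatransfer_v1
from google.protobuf import timestamp_pb2


async def backfill():
    client = bigquery_datatransfer_v1.DataTransferServiceAsyncClient()
    start = timestamp_pb2.Timestamp()
    start.FromJsonString("2021-04-01T00:00:00Z")
    end = timestamp_pb2.Timestamp()
    end.FromJsonString("2021-04-08T00:00:00Z")
    response = await client.schedule_transfer_runs(
        parent="projects/my-project/locations/us/transferConfigs/my-config",
        start_time=start,  # flattened args now typed as timestamp_pb2.Timestamp
        end_time=end,
    )
    for run in response.runs:
        print(run.name)


asyncio.run(backfill())
```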
diff --git a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py
index 4aac13d8..b0dab00f 100644
--- a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py
+++ b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py
@@ -1,5 +1,4 @@
# -*- coding: utf-8 -*-
-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,7 +13,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-
from collections import OrderedDict
from distutils import util
import os
@@ -23,10 +21,10 @@
import pkg_resources
from google.api_core import client_options as client_options_lib # type: ignore
-from google.api_core import exceptions # type: ignore
+from google.api_core import exceptions as core_exceptions # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
-from google.auth import credentials # type: ignore
+from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport import mtls # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
from google.auth.exceptions import MutualTLSChannelError # type: ignore
@@ -35,12 +33,11 @@
from google.cloud.bigquery_datatransfer_v1.services.data_transfer_service import pagers
from google.cloud.bigquery_datatransfer_v1.types import datatransfer
from google.cloud.bigquery_datatransfer_v1.types import transfer
-from google.protobuf import duration_pb2 as duration # type: ignore
-from google.protobuf import field_mask_pb2 as field_mask # type: ignore
-from google.protobuf import struct_pb2 as struct # type: ignore
-from google.protobuf import timestamp_pb2 as timestamp # type: ignore
-from google.rpc import status_pb2 as status # type: ignore
-
+from google.protobuf import duration_pb2 # type: ignore
+from google.protobuf import field_mask_pb2 # type: ignore
+from google.protobuf import struct_pb2 # type: ignore
+from google.protobuf import timestamp_pb2 # type: ignore
+from google.rpc import status_pb2 # type: ignore
from .transports.base import DataTransferServiceTransport, DEFAULT_CLIENT_INFO
from .transports.grpc import DataTransferServiceGrpcTransport
from .transports.grpc_asyncio import DataTransferServiceGrpcAsyncIOTransport
@@ -63,7 +60,7 @@ class DataTransferServiceClientMeta(type):
def get_transport_class(
cls, label: str = None,
) -> Type[DataTransferServiceTransport]:
- """Return an appropriate transport class.
+ """Returns an appropriate transport class.
Args:
label: The name of the desired transport. If none is
@@ -90,7 +87,8 @@ class DataTransferServiceClient(metaclass=DataTransferServiceClientMeta):
@staticmethod
def _get_default_mtls_endpoint(api_endpoint):
- """Convert api endpoint to mTLS endpoint.
+ """Converts api endpoint to mTLS endpoint.
+
Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
"*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
Args:
@@ -124,7 +122,8 @@ def _get_default_mtls_endpoint(api_endpoint):
@classmethod
def from_service_account_info(cls, info: dict, *args, **kwargs):
- """Creates an instance of this client using the provided credentials info.
+ """Creates an instance of this client using the provided credentials
+ info.
Args:
info (dict): The service account private key info.
@@ -141,7 +140,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs):
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
- file.
+ file.
Args:
filename (str): The path to the service account private key json
@@ -160,23 +159,24 @@ def from_service_account_file(cls, filename: str, *args, **kwargs):
@property
def transport(self) -> DataTransferServiceTransport:
- """Return the transport used by the client instance.
+ """Returns the transport used by the client instance.
Returns:
- DataTransferServiceTransport: The transport used by the client instance.
+ DataTransferServiceTransport: The transport used by the client
+ instance.
"""
return self._transport
@staticmethod
def data_source_path(project: str, data_source: str,) -> str:
- """Return a fully-qualified data_source string."""
+ """Returns a fully-qualified data_source string."""
return "projects/{project}/dataSources/{data_source}".format(
project=project, data_source=data_source,
)
@staticmethod
def parse_data_source_path(path: str) -> Dict[str, str]:
- """Parse a data_source path into its component segments."""
+ """Parses a data_source path into its component segments."""
m = re.match(
r"^projects/(?P.+?)/dataSources/(?P.+?)$", path
)
@@ -184,14 +184,14 @@ def parse_data_source_path(path: str) -> Dict[str, str]:
@staticmethod
def run_path(project: str, transfer_config: str, run: str,) -> str:
- """Return a fully-qualified run string."""
+ """Returns a fully-qualified run string."""
return "projects/{project}/transferConfigs/{transfer_config}/runs/{run}".format(
project=project, transfer_config=transfer_config, run=run,
)
@staticmethod
def parse_run_path(path: str) -> Dict[str, str]:
- """Parse a run path into its component segments."""
+ """Parses a run path into its component segments."""
m = re.match(
r"^projects/(?P.+?)/transferConfigs/(?P.+?)/runs/(?P.+?)$",
path,
@@ -200,14 +200,14 @@ def parse_run_path(path: str) -> Dict[str, str]:
@staticmethod
def transfer_config_path(project: str, transfer_config: str,) -> str:
- """Return a fully-qualified transfer_config string."""
+ """Returns a fully-qualified transfer_config string."""
return "projects/{project}/transferConfigs/{transfer_config}".format(
project=project, transfer_config=transfer_config,
)
@staticmethod
def parse_transfer_config_path(path: str) -> Dict[str, str]:
- """Parse a transfer_config path into its component segments."""
+ """Parses a transfer_config path into its component segments."""
m = re.match(
r"^projects/(?P.+?)/transferConfigs/(?P.+?)$",
path,
@@ -216,7 +216,7 @@ def parse_transfer_config_path(path: str) -> Dict[str, str]:
@staticmethod
def common_billing_account_path(billing_account: str,) -> str:
- """Return a fully-qualified billing_account string."""
+ """Returns a fully-qualified billing_account string."""
return "billingAccounts/{billing_account}".format(
billing_account=billing_account,
)
@@ -229,7 +229,7 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]:
@staticmethod
def common_folder_path(folder: str,) -> str:
- """Return a fully-qualified folder string."""
+ """Returns a fully-qualified folder string."""
return "folders/{folder}".format(folder=folder,)
@staticmethod
@@ -240,7 +240,7 @@ def parse_common_folder_path(path: str) -> Dict[str, str]:
@staticmethod
def common_organization_path(organization: str,) -> str:
- """Return a fully-qualified organization string."""
+ """Returns a fully-qualified organization string."""
return "organizations/{organization}".format(organization=organization,)
@staticmethod
@@ -251,7 +251,7 @@ def parse_common_organization_path(path: str) -> Dict[str, str]:
@staticmethod
def common_project_path(project: str,) -> str:
- """Return a fully-qualified project string."""
+ """Returns a fully-qualified project string."""
return "projects/{project}".format(project=project,)
@staticmethod
@@ -262,7 +262,7 @@ def parse_common_project_path(path: str) -> Dict[str, str]:
@staticmethod
def common_location_path(project: str, location: str,) -> str:
- """Return a fully-qualified location string."""
+ """Returns a fully-qualified location string."""
return "projects/{project}/locations/{location}".format(
project=project, location=location,
)
@@ -276,12 +276,12 @@ def parse_common_location_path(path: str) -> Dict[str, str]:
def __init__(
self,
*,
- credentials: Optional[credentials.Credentials] = None,
+ credentials: Optional[ga_credentials.Credentials] = None,
transport: Union[str, DataTransferServiceTransport, None] = None,
client_options: Optional[client_options_lib.ClientOptions] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
- """Instantiate the data transfer service client.
+ """Instantiates the data transfer service client.
Args:
credentials (Optional[google.auth.credentials.Credentials]): The
@@ -336,9 +336,10 @@ def __init__(
client_cert_source_func = client_options.client_cert_source
else:
is_mtls = mtls.has_default_client_cert_source()
- client_cert_source_func = (
- mtls.default_client_cert_source() if is_mtls else None
- )
+ if is_mtls:
+ client_cert_source_func = mtls.default_client_cert_source()
+ else:
+ client_cert_source_func = None
# Figure out which api endpoint to use.
if client_options.api_endpoint is not None:
@@ -350,12 +351,14 @@ def __init__(
elif use_mtls_env == "always":
api_endpoint = self.DEFAULT_MTLS_ENDPOINT
elif use_mtls_env == "auto":
- api_endpoint = (
- self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT
- )
+ if is_mtls:
+ api_endpoint = self.DEFAULT_MTLS_ENDPOINT
+ else:
+ api_endpoint = self.DEFAULT_ENDPOINT
else:
raise MutualTLSChannelError(
- "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always"
+ "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted "
+ "values: never, auto, always"
)
# Save or instantiate the transport.
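
The endpoint decision above honors the `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable (`never`, `auto`, or `always`). A quick sketch of forcing the regular endpoint:

```python
import os
from google.cloud import bigquery_datatransfer_v1

# Must be set before the client is constructed; it is read once in __init__.
os.environ["GOOGLE_API_USE_MTLS_ENDPOINT"] = "never"

client = bigquery_datatransfer_v1.DataTransferServiceClient()
# With "never", the client targets DataTransferServiceClient.DEFAULT_ENDPOINT;
# any value outside {"never", "auto", "always"} raises MutualTLSChannelError.
```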
@@ -370,8 +373,8 @@ def __init__(
)
if client_options.scopes:
raise ValueError(
- "When providing a transport instance, "
- "provide its scopes directly."
+ "When providing a transport instance, provide its scopes "
+ "directly."
)
self._transport = transport
else:
@@ -411,7 +414,6 @@ def get_data_source(
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -441,10 +443,8 @@ def get_data_source(
# there are no flattened fields.
if not isinstance(request, datatransfer.GetDataSourceRequest):
request = datatransfer.GetDataSourceRequest(request)
-
# If we have keyword arguments corresponding to fields on the
# request, apply these.
-
if name is not None:
request.name = name
@@ -489,7 +489,6 @@ def list_data_sources(
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -521,10 +520,8 @@ def list_data_sources(
# there are no flattened fields.
if not isinstance(request, datatransfer.ListDataSourcesRequest):
request = datatransfer.ListDataSourcesRequest(request)
-
# If we have keyword arguments corresponding to fields on the
# request, apply these.
-
if parent is not None:
request.parent = parent
@@ -590,7 +587,6 @@ def create_transfer_config(
This corresponds to the ``transfer_config`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -625,10 +621,8 @@ def create_transfer_config(
# there are no flattened fields.
if not isinstance(request, datatransfer.CreateTransferConfigRequest):
request = datatransfer.CreateTransferConfigRequest(request)
-
# If we have keyword arguments corresponding to fields on the
# request, apply these.
-
if parent is not None:
request.parent = parent
if transfer_config is not None:
@@ -655,7 +649,7 @@ def update_transfer_config(
request: datatransfer.UpdateTransferConfigRequest = None,
*,
transfer_config: transfer.TransferConfig = None,
- update_mask: field_mask.FieldMask = None,
+ update_mask: field_mask_pb2.FieldMask = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
@@ -683,7 +677,6 @@ def update_transfer_config(
This corresponds to the ``update_mask`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -718,10 +711,8 @@ def update_transfer_config(
# there are no flattened fields.
if not isinstance(request, datatransfer.UpdateTransferConfigRequest):
request = datatransfer.UpdateTransferConfigRequest(request)
-
# If we have keyword arguments corresponding to fields on the
# request, apply these.
-
if transfer_config is not None:
request.transfer_config = transfer_config
if update_mask is not None:
@@ -771,7 +762,6 @@ def delete_transfer_config(
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -794,10 +784,8 @@ def delete_transfer_config(
# there are no flattened fields.
if not isinstance(request, datatransfer.DeleteTransferConfigRequest):
request = datatransfer.DeleteTransferConfigRequest(request)
-
# If we have keyword arguments corresponding to fields on the
# request, apply these.
-
if name is not None:
request.name = name
@@ -840,7 +828,6 @@ def get_transfer_config(
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -875,10 +862,8 @@ def get_transfer_config(
# there are no flattened fields.
if not isinstance(request, datatransfer.GetTransferConfigRequest):
request = datatransfer.GetTransferConfigRequest(request)
-
# If we have keyword arguments corresponding to fields on the
# request, apply these.
-
if name is not None:
request.name = name
@@ -922,7 +907,6 @@ def list_transfer_configs(
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -954,10 +938,8 @@ def list_transfer_configs(
# there are no flattened fields.
if not isinstance(request, datatransfer.ListTransferConfigsRequest):
request = datatransfer.ListTransferConfigsRequest(request)
-
# If we have keyword arguments corresponding to fields on the
# request, apply these.
-
if parent is not None:
request.parent = parent
@@ -988,8 +970,8 @@ def schedule_transfer_runs(
request: datatransfer.ScheduleTransferRunsRequest = None,
*,
parent: str = None,
- start_time: timestamp.Timestamp = None,
- end_time: timestamp.Timestamp = None,
+ start_time: timestamp_pb2.Timestamp = None,
+ end_time: timestamp_pb2.Timestamp = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
@@ -1026,7 +1008,6 @@ def schedule_transfer_runs(
This corresponds to the ``end_time`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -1055,10 +1036,8 @@ def schedule_transfer_runs(
# there are no flattened fields.
if not isinstance(request, datatransfer.ScheduleTransferRunsRequest):
request = datatransfer.ScheduleTransferRunsRequest(request)
-
# If we have keyword arguments corresponding to fields on the
# request, apply these.
-
if parent is not None:
request.parent = parent
if start_time is not None:
@@ -1099,7 +1078,6 @@ def start_manual_transfer_runs(
request (google.cloud.bigquery_datatransfer_v1.types.StartManualTransferRunsRequest):
The request object. A request to start manual transfer
runs.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -1113,7 +1091,6 @@ def start_manual_transfer_runs(
"""
# Create or coerce a protobuf request object.
-
# Minor optimization to avoid making a copy if the user passes
# in a datatransfer.StartManualTransferRunsRequest.
# There's no risk of modifying the input as we've already verified
@@ -1165,7 +1142,6 @@ def get_transfer_run(
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -1192,10 +1168,8 @@ def get_transfer_run(
# there are no flattened fields.
if not isinstance(request, datatransfer.GetTransferRunRequest):
request = datatransfer.GetTransferRunRequest(request)
-
# If we have keyword arguments corresponding to fields on the
# request, apply these.
-
if name is not None:
request.name = name
@@ -1240,7 +1214,6 @@ def delete_transfer_run(
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -1263,10 +1236,8 @@ def delete_transfer_run(
# there are no flattened fields.
if not isinstance(request, datatransfer.DeleteTransferRunRequest):
request = datatransfer.DeleteTransferRunRequest(request)
-
# If we have keyword arguments corresponding to fields on the
# request, apply these.
-
if name is not None:
request.name = name
@@ -1312,7 +1283,6 @@ def list_transfer_runs(
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -1344,10 +1314,8 @@ def list_transfer_runs(
# there are no flattened fields.
if not isinstance(request, datatransfer.ListTransferRunsRequest):
request = datatransfer.ListTransferRunsRequest(request)
-
# If we have keyword arguments corresponding to fields on the
# request, apply these.
-
if parent is not None:
request.parent = parent
@@ -1398,7 +1366,6 @@ def list_transfer_logs(
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -1430,10 +1397,8 @@ def list_transfer_logs(
# there are no flattened fields.
if not isinstance(request, datatransfer.ListTransferLogsRequest):
request = datatransfer.ListTransferLogsRequest(request)
-
# If we have keyword arguments corresponding to fields on the
# request, apply these.
-
if parent is not None:
request.parent = parent
@@ -1495,7 +1460,6 @@ def check_valid_creds(
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -1524,10 +1488,8 @@ def check_valid_creds(
# there are no flattened fields.
if not isinstance(request, datatransfer.CheckValidCredsRequest):
request = datatransfer.CheckValidCredsRequest(request)
-
# If we have keyword arguments corresponding to fields on the
# request, apply these.
-
if name is not None:
request.name = name
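
The path helpers touched throughout this file are deterministic formatters and parsers, usable as static methods without instantiating a client. A quick usage sketch (IDs hypothetical):

```python
from google.cloud import bigquery_datatransfer_v1

client_cls = bigquery_datatransfer_v1.DataTransferServiceClient

# Build a fully-qualified resource name...
name = client_cls.transfer_config_path("my-project", "my-config")
# -> "projects/my-project/transferConfigs/my-config"

# ...and parse one back into its component segments.
segments = client_cls.parse_transfer_config_path(name)
# -> {"project": "my-project", "transfer_config": "my-config"}
```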
diff --git a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/pagers.py b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/pagers.py
index 425e8eed..5c238be5 100644
--- a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/pagers.py
+++ b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/pagers.py
@@ -1,5 +1,4 @@
# -*- coding: utf-8 -*-
-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,7 +13,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-
from typing import (
Any,
AsyncIterable,
@@ -118,7 +116,7 @@ def __init__(
*,
metadata: Sequence[Tuple[str, str]] = ()
):
- """Instantiate the pager.
+ """Instantiates the pager.
Args:
method (Callable): The method that was originally called, and
@@ -246,7 +244,7 @@ def __init__(
*,
metadata: Sequence[Tuple[str, str]] = ()
):
- """Instantiate the pager.
+ """Instantiates the pager.
Args:
method (Callable): The method that was originally called, and
@@ -374,7 +372,7 @@ def __init__(
*,
metadata: Sequence[Tuple[str, str]] = ()
):
- """Instantiate the pager.
+ """Instantiates the pager.
Args:
method (Callable): The method that was originally called, and
@@ -502,7 +500,7 @@ def __init__(
*,
metadata: Sequence[Tuple[str, str]] = ()
):
- """Instantiate the pager.
+ """Instantiates the pager.
Args:
method (Callable): The method that was originally called, and
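
The pagers these docstrings describe wrap list responses so callers never handle page tokens directly. A minimal sketch (parent is hypothetical):

```python
from google.cloud import bigquery_datatransfer_v1

client = bigquery_datatransfer_v1.DataTransferServiceClient()

# The pager transparently fetches follow-up pages while yielding items.
for config in client.list_transfer_configs(parent="projects/my-project/locations/us"):
    print(config.display_name)
```

To work page by page instead, iterate the pager's `pages` attribute.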
diff --git a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/__init__.py b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/__init__.py
index 097e5854..cf114cd1 100644
--- a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/__init__.py
+++ b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/__init__.py
@@ -1,5 +1,4 @@
# -*- coding: utf-8 -*-
-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,7 +13,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-
from collections import OrderedDict
from typing import Dict, Type
diff --git a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/base.py b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/base.py
index 104b9bff..830861c5 100644
--- a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/base.py
+++ b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/base.py
@@ -1,5 +1,4 @@
# -*- coding: utf-8 -*-
-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,21 +13,21 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-
import abc
-import typing
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Union
+import packaging.version
import pkg_resources
-from google import auth # type: ignore
-from google.api_core import exceptions # type: ignore
+import google.auth # type: ignore
+import google.api_core # type: ignore
+from google.api_core import exceptions as core_exceptions # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
-from google.auth import credentials # type: ignore
+from google.auth import credentials as ga_credentials # type: ignore
from google.cloud.bigquery_datatransfer_v1.types import datatransfer
from google.cloud.bigquery_datatransfer_v1.types import transfer
-from google.protobuf import empty_pb2 as empty # type: ignore
-
+from google.protobuf import empty_pb2 # type: ignore
try:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
@@ -39,27 +38,41 @@
except pkg_resources.DistributionNotFound:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
+try:
+ # google.auth.__version__ was added in 1.26.0
+ _GOOGLE_AUTH_VERSION = google.auth.__version__
+except AttributeError:
+ try: # try pkg_resources if it is available
+ _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version
+ except pkg_resources.DistributionNotFound: # pragma: NO COVER
+ _GOOGLE_AUTH_VERSION = None
+
+_API_CORE_VERSION = google.api_core.__version__
+
class DataTransferServiceTransport(abc.ABC):
"""Abstract transport class for DataTransferService."""
AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",)
+ DEFAULT_HOST: str = "bigquerydatatransfer.googleapis.com"
+
def __init__(
self,
*,
- host: str = "bigquerydatatransfer.googleapis.com",
- credentials: credentials.Credentials = None,
- credentials_file: typing.Optional[str] = None,
- scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES,
- quota_project_id: typing.Optional[str] = None,
+ host: str = DEFAULT_HOST,
+ credentials: ga_credentials.Credentials = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
**kwargs,
) -> None:
"""Instantiate the transport.
Args:
- host (Optional[str]): The hostname to connect to.
+ host (Optional[str]):
+ The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
@@ -68,7 +81,7 @@ def __init__(
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is mutually exclusive with credentials.
- scope (Optional[Sequence[str]]): A list of scopes.
+ scopes (Optional[Sequence[str]]): A list of scopes.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
@@ -82,29 +95,76 @@ def __init__(
host += ":443"
self._host = host
+ scopes_kwargs = self._get_scopes_kwargs(self._host, scopes)
+
# Save the scopes.
self._scopes = scopes or self.AUTH_SCOPES
# If no credentials are provided, then determine the appropriate
# defaults.
if credentials and credentials_file:
- raise exceptions.DuplicateCredentialArgs(
+ raise core_exceptions.DuplicateCredentialArgs(
"'credentials_file' and 'credentials' are mutually exclusive"
)
if credentials_file is not None:
- credentials, _ = auth.load_credentials_from_file(
- credentials_file, scopes=self._scopes, quota_project_id=quota_project_id
+ credentials, _ = google.auth.load_credentials_from_file(
+ credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
)
elif credentials is None:
- credentials, _ = auth.default(
- scopes=self._scopes, quota_project_id=quota_project_id
+ credentials, _ = google.auth.default(
+ **scopes_kwargs, quota_project_id=quota_project_id
)
# Save the credentials.
self._credentials = credentials
+ # TODO(busunkim): These two class methods are in the base transport
+ # to avoid duplicating code across the transport classes. These functions
+ # should be deleted once the minimum required versions of google-api-core
+ # and google-auth are increased.
+
+ # TODO: Remove this function once google-auth >= 1.25.0 is required
+ @classmethod
+ def _get_scopes_kwargs(
+ cls, host: str, scopes: Optional[Sequence[str]]
+ ) -> Dict[str, Optional[Sequence[str]]]:
+ """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version"""
+
+ scopes_kwargs = {}
+
+ if _GOOGLE_AUTH_VERSION and (
+ packaging.version.parse(_GOOGLE_AUTH_VERSION)
+ >= packaging.version.parse("1.25.0")
+ ):
+ scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES}
+ else:
+ scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES}
+
+ return scopes_kwargs
+
+ # TODO: Remove this function once google-api-core >= 1.26.0 is required
+ @classmethod
+ def _get_self_signed_jwt_kwargs(
+ cls, host: str, scopes: Optional[Sequence[str]]
+ ) -> Dict[str, Union[Optional[Sequence[str]], str]]:
+ """Returns kwargs to pass to grpc_helpers.create_channel depending on the google-api-core version"""
+
+ self_signed_jwt_kwargs: Dict[str, Union[Optional[Sequence[str]], str]] = {}
+
+ if _API_CORE_VERSION and (
+ packaging.version.parse(_API_CORE_VERSION)
+ >= packaging.version.parse("1.26.0")
+ ):
+ self_signed_jwt_kwargs["default_scopes"] = cls.AUTH_SCOPES
+ self_signed_jwt_kwargs["scopes"] = scopes
+ self_signed_jwt_kwargs["default_host"] = cls.DEFAULT_HOST
+ else:
+ self_signed_jwt_kwargs["scopes"] = scopes or cls.AUTH_SCOPES
+
+ return self_signed_jwt_kwargs
+
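
The two TODO-gated helpers above hinge on `packaging.version` comparisons. As a quick illustration of those semantics (not part of the library itself):

```python
import packaging.version

# parse() yields comparable Version objects, so "1.7.2" correctly
# sorts below "1.25.0" where naive string comparison would not.
assert packaging.version.parse("1.25.0") >= packaging.version.parse("1.24.9")
assert packaging.version.parse("1.7.2") < packaging.version.parse("1.25.0")
```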
def _prep_wrapped_messages(self, client_info):
# Precompute the wrapped methods.
self._wrapped_methods = {
@@ -115,7 +175,8 @@ def _prep_wrapped_messages(self, client_info):
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
- exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
+ core_exceptions.DeadlineExceeded,
+ core_exceptions.ServiceUnavailable,
),
deadline=20.0,
),
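
The same default retry policy recurs across the wrapped methods below. Built standalone with the values from this hunk, it would look like this sketch:

```python
from google.api_core import exceptions as core_exceptions
from google.api_core import retry as retries

# Exponential backoff: 0.1s initial delay, x1.3 per attempt, capped at 60s,
# retrying only transient errors, with a 20s overall deadline.
default_retry = retries.Retry(
    initial=0.1,
    maximum=60.0,
    multiplier=1.3,
    predicate=retries.if_exception_type(
        core_exceptions.DeadlineExceeded,
        core_exceptions.ServiceUnavailable,
    ),
    deadline=20.0,
)
```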
@@ -129,7 +190,8 @@ def _prep_wrapped_messages(self, client_info):
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
- exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
+ core_exceptions.DeadlineExceeded,
+ core_exceptions.ServiceUnavailable,
),
deadline=20.0,
),
@@ -153,7 +215,8 @@ def _prep_wrapped_messages(self, client_info):
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
- exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
+ core_exceptions.DeadlineExceeded,
+ core_exceptions.ServiceUnavailable,
),
deadline=20.0,
),
@@ -167,7 +230,8 @@ def _prep_wrapped_messages(self, client_info):
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
- exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
+ core_exceptions.DeadlineExceeded,
+ core_exceptions.ServiceUnavailable,
),
deadline=20.0,
),
@@ -181,7 +245,8 @@ def _prep_wrapped_messages(self, client_info):
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
- exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
+ core_exceptions.DeadlineExceeded,
+ core_exceptions.ServiceUnavailable,
),
deadline=20.0,
),
@@ -205,7 +270,8 @@ def _prep_wrapped_messages(self, client_info):
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
- exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
+ core_exceptions.DeadlineExceeded,
+ core_exceptions.ServiceUnavailable,
),
deadline=20.0,
),
@@ -219,7 +285,8 @@ def _prep_wrapped_messages(self, client_info):
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
- exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
+ core_exceptions.DeadlineExceeded,
+ core_exceptions.ServiceUnavailable,
),
deadline=20.0,
),
@@ -233,7 +300,8 @@ def _prep_wrapped_messages(self, client_info):
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
- exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
+ core_exceptions.DeadlineExceeded,
+ core_exceptions.ServiceUnavailable,
),
deadline=20.0,
),
@@ -247,7 +315,8 @@ def _prep_wrapped_messages(self, client_info):
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
- exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
+ core_exceptions.DeadlineExceeded,
+ core_exceptions.ServiceUnavailable,
),
deadline=20.0,
),
@@ -261,7 +330,8 @@ def _prep_wrapped_messages(self, client_info):
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
- exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
+ core_exceptions.DeadlineExceeded,
+ core_exceptions.ServiceUnavailable,
),
deadline=20.0,
),
@@ -273,22 +343,20 @@ def _prep_wrapped_messages(self, client_info):
@property
def get_data_source(
self,
- ) -> typing.Callable[
+ ) -> Callable[
[datatransfer.GetDataSourceRequest],
- typing.Union[
- datatransfer.DataSource, typing.Awaitable[datatransfer.DataSource]
- ],
+ Union[datatransfer.DataSource, Awaitable[datatransfer.DataSource]],
]:
raise NotImplementedError()
@property
def list_data_sources(
self,
- ) -> typing.Callable[
+ ) -> Callable[
[datatransfer.ListDataSourcesRequest],
- typing.Union[
+ Union[
datatransfer.ListDataSourcesResponse,
- typing.Awaitable[datatransfer.ListDataSourcesResponse],
+ Awaitable[datatransfer.ListDataSourcesResponse],
],
]:
raise NotImplementedError()
@@ -296,53 +364,47 @@ def list_data_sources(
@property
def create_transfer_config(
self,
- ) -> typing.Callable[
+ ) -> Callable[
[datatransfer.CreateTransferConfigRequest],
- typing.Union[
- transfer.TransferConfig, typing.Awaitable[transfer.TransferConfig]
- ],
+ Union[transfer.TransferConfig, Awaitable[transfer.TransferConfig]],
]:
raise NotImplementedError()
@property
def update_transfer_config(
self,
- ) -> typing.Callable[
+ ) -> Callable[
[datatransfer.UpdateTransferConfigRequest],
- typing.Union[
- transfer.TransferConfig, typing.Awaitable[transfer.TransferConfig]
- ],
+ Union[transfer.TransferConfig, Awaitable[transfer.TransferConfig]],
]:
raise NotImplementedError()
@property
def delete_transfer_config(
self,
- ) -> typing.Callable[
+ ) -> Callable[
[datatransfer.DeleteTransferConfigRequest],
- typing.Union[empty.Empty, typing.Awaitable[empty.Empty]],
+ Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]],
]:
raise NotImplementedError()
@property
def get_transfer_config(
self,
- ) -> typing.Callable[
+ ) -> Callable[
[datatransfer.GetTransferConfigRequest],
- typing.Union[
- transfer.TransferConfig, typing.Awaitable[transfer.TransferConfig]
- ],
+ Union[transfer.TransferConfig, Awaitable[transfer.TransferConfig]],
]:
raise NotImplementedError()
@property
def list_transfer_configs(
self,
- ) -> typing.Callable[
+ ) -> Callable[
[datatransfer.ListTransferConfigsRequest],
- typing.Union[
+ Union[
datatransfer.ListTransferConfigsResponse,
- typing.Awaitable[datatransfer.ListTransferConfigsResponse],
+ Awaitable[datatransfer.ListTransferConfigsResponse],
],
]:
raise NotImplementedError()
@@ -350,11 +412,11 @@ def list_transfer_configs(
@property
def schedule_transfer_runs(
self,
- ) -> typing.Callable[
+ ) -> Callable[
[datatransfer.ScheduleTransferRunsRequest],
- typing.Union[
+ Union[
datatransfer.ScheduleTransferRunsResponse,
- typing.Awaitable[datatransfer.ScheduleTransferRunsResponse],
+ Awaitable[datatransfer.ScheduleTransferRunsResponse],
],
]:
raise NotImplementedError()
@@ -362,11 +424,11 @@ def schedule_transfer_runs(
@property
def start_manual_transfer_runs(
self,
- ) -> typing.Callable[
+ ) -> Callable[
[datatransfer.StartManualTransferRunsRequest],
- typing.Union[
+ Union[
datatransfer.StartManualTransferRunsResponse,
- typing.Awaitable[datatransfer.StartManualTransferRunsResponse],
+ Awaitable[datatransfer.StartManualTransferRunsResponse],
],
]:
raise NotImplementedError()
@@ -374,29 +436,29 @@ def start_manual_transfer_runs(
@property
def get_transfer_run(
self,
- ) -> typing.Callable[
+ ) -> Callable[
[datatransfer.GetTransferRunRequest],
- typing.Union[transfer.TransferRun, typing.Awaitable[transfer.TransferRun]],
+ Union[transfer.TransferRun, Awaitable[transfer.TransferRun]],
]:
raise NotImplementedError()
@property
def delete_transfer_run(
self,
- ) -> typing.Callable[
+ ) -> Callable[
[datatransfer.DeleteTransferRunRequest],
- typing.Union[empty.Empty, typing.Awaitable[empty.Empty]],
+ Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]],
]:
raise NotImplementedError()
@property
def list_transfer_runs(
self,
- ) -> typing.Callable[
+ ) -> Callable[
[datatransfer.ListTransferRunsRequest],
- typing.Union[
+ Union[
datatransfer.ListTransferRunsResponse,
- typing.Awaitable[datatransfer.ListTransferRunsResponse],
+ Awaitable[datatransfer.ListTransferRunsResponse],
],
]:
raise NotImplementedError()
@@ -404,11 +466,11 @@ def list_transfer_runs(
@property
def list_transfer_logs(
self,
- ) -> typing.Callable[
+ ) -> Callable[
[datatransfer.ListTransferLogsRequest],
- typing.Union[
+ Union[
datatransfer.ListTransferLogsResponse,
- typing.Awaitable[datatransfer.ListTransferLogsResponse],
+ Awaitable[datatransfer.ListTransferLogsResponse],
],
]:
raise NotImplementedError()
@@ -416,11 +478,11 @@ def list_transfer_logs(
@property
def check_valid_creds(
self,
- ) -> typing.Callable[
+ ) -> Callable[
[datatransfer.CheckValidCredsRequest],
- typing.Union[
+ Union[
datatransfer.CheckValidCredsResponse,
- typing.Awaitable[datatransfer.CheckValidCredsResponse],
+ Awaitable[datatransfer.CheckValidCredsResponse],
],
]:
raise NotImplementedError()
diff --git a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc.py b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc.py
index c6c39bb4..9181ae48 100644
--- a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc.py
+++ b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc.py
@@ -1,5 +1,4 @@
# -*- coding: utf-8 -*-
-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,22 +13,20 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-
import warnings
-from typing import Callable, Dict, Optional, Sequence, Tuple
+from typing import Callable, Dict, Optional, Sequence, Tuple, Union
from google.api_core import grpc_helpers # type: ignore
from google.api_core import gapic_v1 # type: ignore
-from google import auth # type: ignore
-from google.auth import credentials # type: ignore
+import google.auth # type: ignore
+from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
import grpc # type: ignore
from google.cloud.bigquery_datatransfer_v1.types import datatransfer
from google.cloud.bigquery_datatransfer_v1.types import transfer
-from google.protobuf import empty_pb2 as empty # type: ignore
-
+from google.protobuf import empty_pb2 # type: ignore
from .base import DataTransferServiceTransport, DEFAULT_CLIENT_INFO
@@ -55,7 +52,7 @@ def __init__(
self,
*,
host: str = "bigquerydatatransfer.googleapis.com",
- credentials: credentials.Credentials = None,
+ credentials: ga_credentials.Credentials = None,
credentials_file: str = None,
scopes: Sequence[str] = None,
channel: grpc.Channel = None,
@@ -69,7 +66,8 @@ def __init__(
"""Instantiate the transport.
Args:
- host (Optional[str]): The hostname to connect to.
+ host (Optional[str]):
+ The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
@@ -179,7 +177,7 @@ def __init__(
def create_channel(
cls,
host: str = "bigquerydatatransfer.googleapis.com",
- credentials: credentials.Credentials = None,
+ credentials: ga_credentials.Credentials = None,
credentials_file: str = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
@@ -210,13 +208,15 @@ def create_channel(
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
- scopes = scopes or cls.AUTH_SCOPES
+
+ self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes)
+
return grpc_helpers.create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
- scopes=scopes,
quota_project_id=quota_project_id,
+ **self_signed_jwt_kwargs,
**kwargs,
)
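A hypothetical call site for the updated classmethod (it needs Application Default Credentials at runtime; the host is the module default):

    from google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.transports.grpc import (
        DataTransferServiceGrpcTransport,
    )

    # With scopes=None, _get_self_signed_jwt_kwargs falls back to AUTH_SCOPES
    # (passed as default_scopes on google-api-core >= 1.26.0).
    channel = DataTransferServiceGrpcTransport.create_channel(
        "bigquerydatatransfer.googleapis.com",
        scopes=None,
    )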
@@ -338,7 +338,7 @@ def update_transfer_config(
@property
def delete_transfer_config(
self,
- ) -> Callable[[datatransfer.DeleteTransferConfigRequest], empty.Empty]:
+ ) -> Callable[[datatransfer.DeleteTransferConfigRequest], empty_pb2.Empty]:
r"""Return a callable for the delete transfer config method over gRPC.
Deletes a data transfer configuration,
@@ -358,7 +358,7 @@ def delete_transfer_config(
self._stubs["delete_transfer_config"] = self.grpc_channel.unary_unary(
"/google.cloud.bigquery.datatransfer.v1.DataTransferService/DeleteTransferConfig",
request_serializer=datatransfer.DeleteTransferConfigRequest.serialize,
- response_deserializer=empty.Empty.FromString,
+ response_deserializer=empty_pb2.Empty.FromString,
)
return self._stubs["delete_transfer_config"]
@@ -513,7 +513,7 @@ def get_transfer_run(
@property
def delete_transfer_run(
self,
- ) -> Callable[[datatransfer.DeleteTransferRunRequest], empty.Empty]:
+ ) -> Callable[[datatransfer.DeleteTransferRunRequest], empty_pb2.Empty]:
r"""Return a callable for the delete transfer run method over gRPC.
Deletes the specified transfer run.
@@ -532,7 +532,7 @@ def delete_transfer_run(
self._stubs["delete_transfer_run"] = self.grpc_channel.unary_unary(
"/google.cloud.bigquery.datatransfer.v1.DataTransferService/DeleteTransferRun",
request_serializer=datatransfer.DeleteTransferRunRequest.serialize,
- response_deserializer=empty.Empty.FromString,
+ response_deserializer=empty_pb2.Empty.FromString,
)
return self._stubs["delete_transfer_run"]
diff --git a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc_asyncio.py b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc_asyncio.py
index 00cb6fb6..2cd986db 100644
--- a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc_asyncio.py
+++ b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc_asyncio.py
@@ -1,5 +1,4 @@
# -*- coding: utf-8 -*-
-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,23 +13,21 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-
import warnings
-from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
from google.api_core import gapic_v1 # type: ignore
from google.api_core import grpc_helpers_async # type: ignore
-from google import auth # type: ignore
-from google.auth import credentials # type: ignore
+from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
+import packaging.version
import grpc # type: ignore
from grpc.experimental import aio # type: ignore
from google.cloud.bigquery_datatransfer_v1.types import datatransfer
from google.cloud.bigquery_datatransfer_v1.types import transfer
-from google.protobuf import empty_pb2 as empty # type: ignore
-
+from google.protobuf import empty_pb2 # type: ignore
from .base import DataTransferServiceTransport, DEFAULT_CLIENT_INFO
from .grpc import DataTransferServiceGrpcTransport
@@ -58,7 +55,7 @@ class DataTransferServiceGrpcAsyncIOTransport(DataTransferServiceTransport):
def create_channel(
cls,
host: str = "bigquerydatatransfer.googleapis.com",
- credentials: credentials.Credentials = None,
+ credentials: ga_credentials.Credentials = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
@@ -85,13 +82,15 @@ def create_channel(
Returns:
aio.Channel: A gRPC AsyncIO channel object.
"""
- scopes = scopes or cls.AUTH_SCOPES
+
+ self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes)
+
return grpc_helpers_async.create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
- scopes=scopes,
quota_project_id=quota_project_id,
+ **self_signed_jwt_kwargs,
**kwargs,
)
@@ -99,7 +98,7 @@ def __init__(
self,
*,
host: str = "bigquerydatatransfer.googleapis.com",
- credentials: credentials.Credentials = None,
+ credentials: ga_credentials.Credentials = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
channel: aio.Channel = None,
@@ -113,7 +112,8 @@ def __init__(
"""Instantiate the transport.
Args:
- host (Optional[str]): The hostname to connect to.
+ host (Optional[str]):
+ The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
@@ -171,7 +171,6 @@ def __init__(
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
self._ssl_channel_credentials = None
-
else:
if api_mtls_endpoint:
host = api_mtls_endpoint
@@ -349,7 +348,9 @@ def update_transfer_config(
@property
def delete_transfer_config(
self,
- ) -> Callable[[datatransfer.DeleteTransferConfigRequest], Awaitable[empty.Empty]]:
+ ) -> Callable[
+ [datatransfer.DeleteTransferConfigRequest], Awaitable[empty_pb2.Empty]
+ ]:
r"""Return a callable for the delete transfer config method over gRPC.
Deletes a data transfer configuration,
@@ -369,7 +370,7 @@ def delete_transfer_config(
self._stubs["delete_transfer_config"] = self.grpc_channel.unary_unary(
"/google.cloud.bigquery.datatransfer.v1.DataTransferService/DeleteTransferConfig",
request_serializer=datatransfer.DeleteTransferConfigRequest.serialize,
- response_deserializer=empty.Empty.FromString,
+ response_deserializer=empty_pb2.Empty.FromString,
)
return self._stubs["delete_transfer_config"]
@@ -528,7 +529,7 @@ def get_transfer_run(
@property
def delete_transfer_run(
self,
- ) -> Callable[[datatransfer.DeleteTransferRunRequest], Awaitable[empty.Empty]]:
+ ) -> Callable[[datatransfer.DeleteTransferRunRequest], Awaitable[empty_pb2.Empty]]:
r"""Return a callable for the delete transfer run method over gRPC.
Deletes the specified transfer run.
@@ -547,7 +548,7 @@ def delete_transfer_run(
self._stubs["delete_transfer_run"] = self.grpc_channel.unary_unary(
"/google.cloud.bigquery.datatransfer.v1.DataTransferService/DeleteTransferRun",
request_serializer=datatransfer.DeleteTransferRunRequest.serialize,
- response_deserializer=empty.Empty.FromString,
+ response_deserializer=empty_pb2.Empty.FromString,
)
return self._stubs["delete_transfer_run"]
diff --git a/google/cloud/bigquery_datatransfer_v1/types/__init__.py b/google/cloud/bigquery_datatransfer_v1/types/__init__.py
index b886143f..b79fc3ff 100644
--- a/google/cloud/bigquery_datatransfer_v1/types/__init__.py
+++ b/google/cloud/bigquery_datatransfer_v1/types/__init__.py
@@ -1,5 +1,4 @@
# -*- coding: utf-8 -*-
-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,7 +13,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-
from .datatransfer import (
CheckValidCredsRequest,
CheckValidCredsResponse,
diff --git a/google/cloud/bigquery_datatransfer_v1/types/datatransfer.py b/google/cloud/bigquery_datatransfer_v1/types/datatransfer.py
index 9b92f388..d9869c0d 100644
--- a/google/cloud/bigquery_datatransfer_v1/types/datatransfer.py
+++ b/google/cloud/bigquery_datatransfer_v1/types/datatransfer.py
@@ -1,5 +1,4 @@
# -*- coding: utf-8 -*-
-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,15 +13,13 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-
import proto # type: ignore
-
from google.cloud.bigquery_datatransfer_v1.types import transfer
-from google.protobuf import duration_pb2 as duration # type: ignore
-from google.protobuf import field_mask_pb2 as field_mask # type: ignore
-from google.protobuf import timestamp_pb2 as timestamp # type: ignore
-from google.protobuf import wrappers_pb2 as wrappers # type: ignore
+from google.protobuf import duration_pb2 # type: ignore
+from google.protobuf import field_mask_pb2 # type: ignore
+from google.protobuf import timestamp_pb2 # type: ignore
+from google.protobuf import wrappers_pb2 # type: ignore
__protobuf__ = proto.module(
@@ -116,39 +113,24 @@ class Type(proto.Enum):
RECORD = 5
PLUS_PAGE = 6
- param_id = proto.Field(proto.STRING, number=1)
-
- display_name = proto.Field(proto.STRING, number=2)
-
- description = proto.Field(proto.STRING, number=3)
-
+ param_id = proto.Field(proto.STRING, number=1,)
+ display_name = proto.Field(proto.STRING, number=2,)
+ description = proto.Field(proto.STRING, number=3,)
type_ = proto.Field(proto.ENUM, number=4, enum=Type,)
-
- required = proto.Field(proto.BOOL, number=5)
-
- repeated = proto.Field(proto.BOOL, number=6)
-
- validation_regex = proto.Field(proto.STRING, number=7)
-
- allowed_values = proto.RepeatedField(proto.STRING, number=8)
-
- min_value = proto.Field(proto.MESSAGE, number=9, message=wrappers.DoubleValue,)
-
- max_value = proto.Field(proto.MESSAGE, number=10, message=wrappers.DoubleValue,)
-
+ required = proto.Field(proto.BOOL, number=5,)
+ repeated = proto.Field(proto.BOOL, number=6,)
+ validation_regex = proto.Field(proto.STRING, number=7,)
+ allowed_values = proto.RepeatedField(proto.STRING, number=8,)
+ min_value = proto.Field(proto.MESSAGE, number=9, message=wrappers_pb2.DoubleValue,)
+ max_value = proto.Field(proto.MESSAGE, number=10, message=wrappers_pb2.DoubleValue,)
fields = proto.RepeatedField(
proto.MESSAGE, number=11, message="DataSourceParameter",
)
-
- validation_description = proto.Field(proto.STRING, number=12)
-
- validation_help_url = proto.Field(proto.STRING, number=13)
-
- immutable = proto.Field(proto.BOOL, number=14)
-
- recurse = proto.Field(proto.BOOL, number=15)
-
- deprecated = proto.Field(proto.BOOL, number=20)
+ validation_description = proto.Field(proto.STRING, number=12,)
+ validation_help_url = proto.Field(proto.STRING, number=13,)
+ immutable = proto.Field(proto.BOOL, number=14,)
+ recurse = proto.Field(proto.BOOL, number=15,)
+ deprecated = proto.Field(proto.BOOL, number=20,)
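The reshuffling above is mechanical: proto-plus scalar fields take a wire type and a field number, while message-typed fields also name the wrapped protobuf type. A minimal message in the same style, purely for illustration (not part of this package):

    import proto
    from google.protobuf import wrappers_pb2

    class ExampleParameter(proto.Message):
        # Scalar fields: wire type plus proto field number.
        param_id = proto.Field(proto.STRING, number=1,)
        required = proto.Field(proto.BOOL, number=5,)
        # Message fields additionally name the wrapped well-known type.
        min_value = proto.Field(
            proto.MESSAGE, number=9, message=wrappers_pb2.DoubleValue,
        )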
class DataSource(proto.Message):
@@ -228,50 +210,32 @@ class DataRefreshType(proto.Enum):
SLIDING_WINDOW = 1
CUSTOM_SLIDING_WINDOW = 2
- name = proto.Field(proto.STRING, number=1)
-
- data_source_id = proto.Field(proto.STRING, number=2)
-
- display_name = proto.Field(proto.STRING, number=3)
-
- description = proto.Field(proto.STRING, number=4)
-
- client_id = proto.Field(proto.STRING, number=5)
-
- scopes = proto.RepeatedField(proto.STRING, number=6)
-
+ name = proto.Field(proto.STRING, number=1,)
+ data_source_id = proto.Field(proto.STRING, number=2,)
+ display_name = proto.Field(proto.STRING, number=3,)
+ description = proto.Field(proto.STRING, number=4,)
+ client_id = proto.Field(proto.STRING, number=5,)
+ scopes = proto.RepeatedField(proto.STRING, number=6,)
transfer_type = proto.Field(proto.ENUM, number=7, enum=transfer.TransferType,)
-
- supports_multiple_transfers = proto.Field(proto.BOOL, number=8)
-
- update_deadline_seconds = proto.Field(proto.INT32, number=9)
-
- default_schedule = proto.Field(proto.STRING, number=10)
-
- supports_custom_schedule = proto.Field(proto.BOOL, number=11)
-
+ supports_multiple_transfers = proto.Field(proto.BOOL, number=8,)
+ update_deadline_seconds = proto.Field(proto.INT32, number=9,)
+ default_schedule = proto.Field(proto.STRING, number=10,)
+ supports_custom_schedule = proto.Field(proto.BOOL, number=11,)
parameters = proto.RepeatedField(
proto.MESSAGE, number=12, message="DataSourceParameter",
)
-
- help_url = proto.Field(proto.STRING, number=13)
-
+ help_url = proto.Field(proto.STRING, number=13,)
authorization_type = proto.Field(proto.ENUM, number=14, enum=AuthorizationType,)
-
data_refresh_type = proto.Field(proto.ENUM, number=15, enum=DataRefreshType,)
-
- default_data_refresh_window_days = proto.Field(proto.INT32, number=16)
-
- manual_runs_disabled = proto.Field(proto.BOOL, number=17)
-
+ default_data_refresh_window_days = proto.Field(proto.INT32, number=16,)
+ manual_runs_disabled = proto.Field(proto.BOOL, number=17,)
minimum_schedule_interval = proto.Field(
- proto.MESSAGE, number=18, message=duration.Duration,
+ proto.MESSAGE, number=18, message=duration_pb2.Duration,
)
class GetDataSourceRequest(proto.Message):
r"""A request to get data source info.
-
Attributes:
name (str):
Required. The field will contain name of the resource
@@ -280,7 +244,7 @@ class GetDataSourceRequest(proto.Message):
``projects/{project_id}/locations/{location_id}/dataSources/{data_source_id}``
"""
- name = proto.Field(proto.STRING, number=1)
+ name = proto.Field(proto.STRING, number=1,)
class ListDataSourcesRequest(proto.Message):
@@ -304,16 +268,13 @@ class ListDataSourcesRequest(proto.Message):
maximum value of 1000 results.
"""
- parent = proto.Field(proto.STRING, number=1)
-
- page_token = proto.Field(proto.STRING, number=3)
-
- page_size = proto.Field(proto.INT32, number=4)
+ parent = proto.Field(proto.STRING, number=1,)
+ page_token = proto.Field(proto.STRING, number=3,)
+ page_size = proto.Field(proto.INT32, number=4,)
class ListDataSourcesResponse(proto.Message):
r"""Returns list of supported data sources and their metadata.
-
Attributes:
data_sources (Sequence[google.cloud.bigquery_datatransfer_v1.types.DataSource]):
List of supported data sources and their
@@ -330,8 +291,7 @@ def raw_page(self):
return self
data_sources = proto.RepeatedField(proto.MESSAGE, number=1, message="DataSource",)
-
- next_page_token = proto.Field(proto.STRING, number=2)
+ next_page_token = proto.Field(proto.STRING, number=2,)
class CreateTransferConfigRequest(proto.Message):
@@ -389,17 +349,13 @@ class CreateTransferConfigRequest(proto.Message):
permissions to act as this service account.
"""
- parent = proto.Field(proto.STRING, number=1)
-
+ parent = proto.Field(proto.STRING, number=1,)
transfer_config = proto.Field(
proto.MESSAGE, number=2, message=transfer.TransferConfig,
)
-
- authorization_code = proto.Field(proto.STRING, number=3)
-
- version_info = proto.Field(proto.STRING, number=5)
-
- service_account_name = proto.Field(proto.STRING, number=6)
+ authorization_code = proto.Field(proto.STRING, number=3,)
+ version_info = proto.Field(proto.STRING, number=5,)
+ service_account_name = proto.Field(proto.STRING, number=6,)
class UpdateTransferConfigRequest(proto.Message):
@@ -453,19 +409,16 @@ class UpdateTransferConfigRequest(proto.Message):
transfer_config = proto.Field(
proto.MESSAGE, number=1, message=transfer.TransferConfig,
)
-
- authorization_code = proto.Field(proto.STRING, number=3)
-
- update_mask = proto.Field(proto.MESSAGE, number=4, message=field_mask.FieldMask,)
-
- version_info = proto.Field(proto.STRING, number=5)
-
- service_account_name = proto.Field(proto.STRING, number=6)
+ authorization_code = proto.Field(proto.STRING, number=3,)
+ update_mask = proto.Field(
+ proto.MESSAGE, number=4, message=field_mask_pb2.FieldMask,
+ )
+ version_info = proto.Field(proto.STRING, number=5,)
+ service_account_name = proto.Field(proto.STRING, number=6,)
class GetTransferConfigRequest(proto.Message):
r"""A request to get data transfer information.
-
Attributes:
name (str):
Required. The field will contain name of the resource
@@ -474,7 +427,7 @@ class GetTransferConfigRequest(proto.Message):
``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}``
"""
- name = proto.Field(proto.STRING, number=1)
+ name = proto.Field(proto.STRING, number=1,)
class DeleteTransferConfigRequest(proto.Message):
@@ -489,12 +442,11 @@ class DeleteTransferConfigRequest(proto.Message):
``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}``
"""
- name = proto.Field(proto.STRING, number=1)
+ name = proto.Field(proto.STRING, number=1,)
class GetTransferRunRequest(proto.Message):
r"""A request to get data transfer run information.
-
Attributes:
name (str):
Required. The field will contain name of the resource
@@ -504,12 +456,11 @@ class GetTransferRunRequest(proto.Message):
``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}``
"""
- name = proto.Field(proto.STRING, number=1)
+ name = proto.Field(proto.STRING, number=1,)
class DeleteTransferRunRequest(proto.Message):
r"""A request to delete data transfer run information.
-
Attributes:
name (str):
Required. The field will contain name of the resource
@@ -519,7 +470,7 @@ class DeleteTransferRunRequest(proto.Message):
``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}``
"""
- name = proto.Field(proto.STRING, number=1)
+ name = proto.Field(proto.STRING, number=1,)
class ListTransferConfigsRequest(proto.Message):
@@ -545,18 +496,14 @@ class ListTransferConfigsRequest(proto.Message):
maximum value of 1000 results.
"""
- parent = proto.Field(proto.STRING, number=1)
-
- data_source_ids = proto.RepeatedField(proto.STRING, number=2)
-
- page_token = proto.Field(proto.STRING, number=3)
-
- page_size = proto.Field(proto.INT32, number=4)
+ parent = proto.Field(proto.STRING, number=1,)
+ data_source_ids = proto.RepeatedField(proto.STRING, number=2,)
+ page_token = proto.Field(proto.STRING, number=3,)
+ page_size = proto.Field(proto.INT32, number=4,)
class ListTransferConfigsResponse(proto.Message):
r"""The returned list of pipelines in the project.
-
Attributes:
transfer_configs (Sequence[google.cloud.bigquery_datatransfer_v1.types.TransferConfig]):
Output only. The stored pipeline transfer
@@ -575,8 +522,7 @@ def raw_page(self):
transfer_configs = proto.RepeatedField(
proto.MESSAGE, number=1, message=transfer.TransferConfig,
)
-
- next_page_token = proto.Field(proto.STRING, number=2)
+ next_page_token = proto.Field(proto.STRING, number=2,)
class ListTransferRunsRequest(proto.Message):
@@ -613,20 +559,15 @@ class RunAttempt(proto.Enum):
RUN_ATTEMPT_UNSPECIFIED = 0
LATEST = 1
- parent = proto.Field(proto.STRING, number=1)
-
+ parent = proto.Field(proto.STRING, number=1,)
states = proto.RepeatedField(proto.ENUM, number=2, enum=transfer.TransferState,)
-
- page_token = proto.Field(proto.STRING, number=3)
-
- page_size = proto.Field(proto.INT32, number=4)
-
+ page_token = proto.Field(proto.STRING, number=3,)
+ page_size = proto.Field(proto.INT32, number=4,)
run_attempt = proto.Field(proto.ENUM, number=5, enum=RunAttempt,)
class ListTransferRunsResponse(proto.Message):
r"""The returned list of pipelines in the project.
-
Attributes:
transfer_runs (Sequence[google.cloud.bigquery_datatransfer_v1.types.TransferRun]):
Output only. The stored pipeline transfer
@@ -645,8 +586,7 @@ def raw_page(self):
transfer_runs = proto.RepeatedField(
proto.MESSAGE, number=1, message=transfer.TransferRun,
)
-
- next_page_token = proto.Field(proto.STRING, number=2)
+ next_page_token = proto.Field(proto.STRING, number=2,)
class ListTransferLogsRequest(proto.Message):
@@ -674,12 +614,9 @@ class ListTransferLogsRequest(proto.Message):
INFO, WARNING and ERROR messages are returned.
"""
- parent = proto.Field(proto.STRING, number=1)
-
- page_token = proto.Field(proto.STRING, number=4)
-
- page_size = proto.Field(proto.INT32, number=5)
-
+ parent = proto.Field(proto.STRING, number=1,)
+ page_token = proto.Field(proto.STRING, number=4,)
+ page_size = proto.Field(proto.INT32, number=5,)
message_types = proto.RepeatedField(
proto.ENUM, number=6, enum=transfer.TransferMessage.MessageSeverity,
)
@@ -687,7 +624,6 @@ class ListTransferLogsRequest(proto.Message):
class ListTransferLogsResponse(proto.Message):
r"""The returned list transfer run messages.
-
Attributes:
transfer_messages (Sequence[google.cloud.bigquery_datatransfer_v1.types.TransferMessage]):
Output only. The stored pipeline transfer
@@ -706,8 +642,7 @@ def raw_page(self):
transfer_messages = proto.RepeatedField(
proto.MESSAGE, number=1, message=transfer.TransferMessage,
)
-
- next_page_token = proto.Field(proto.STRING, number=2)
+ next_page_token = proto.Field(proto.STRING, number=2,)
class CheckValidCredsRequest(proto.Message):
@@ -726,7 +661,7 @@ class CheckValidCredsRequest(proto.Message):
``projects/{project_id}/locations/{location_id}/dataSources/{data_source_id}``.
"""
- name = proto.Field(proto.STRING, number=1)
+ name = proto.Field(proto.STRING, number=1,)
class CheckValidCredsResponse(proto.Message):
@@ -738,12 +673,11 @@ class CheckValidCredsResponse(proto.Message):
If set to ``true``, the credentials exist and are valid.
"""
- has_valid_creds = proto.Field(proto.BOOL, number=1)
+ has_valid_creds = proto.Field(proto.BOOL, number=1,)
class ScheduleTransferRunsRequest(proto.Message):
r"""A request to schedule transfer runs for a time range.
-
Attributes:
parent (str):
Required. Transfer configuration name in the form:
@@ -757,16 +691,13 @@ class ScheduleTransferRunsRequest(proto.Message):
example, ``"2017-05-30T00:00:00+00:00"``.
"""
- parent = proto.Field(proto.STRING, number=1)
-
- start_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,)
-
- end_time = proto.Field(proto.MESSAGE, number=3, message=timestamp.Timestamp,)
+ parent = proto.Field(proto.STRING, number=1,)
+ start_time = proto.Field(proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp,)
+ end_time = proto.Field(proto.MESSAGE, number=3, message=timestamp_pb2.Timestamp,)
class ScheduleTransferRunsResponse(proto.Message):
r"""A response to schedule transfer runs for a time range.
-
Attributes:
runs (Sequence[google.cloud.bigquery_datatransfer_v1.types.TransferRun]):
The transfer runs that were scheduled.
@@ -777,7 +708,6 @@ class ScheduleTransferRunsResponse(proto.Message):
class StartManualTransferRunsRequest(proto.Message):
r"""A request to start manual transfer runs.
-
Attributes:
parent (str):
Transfer configuration name in the form:
@@ -810,24 +740,24 @@ class TimeRange(proto.Message):
        range between start_time (inclusive) and end_time (exclusive).
"""
- start_time = proto.Field(proto.MESSAGE, number=1, message=timestamp.Timestamp,)
-
- end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,)
-
- parent = proto.Field(proto.STRING, number=1)
+ start_time = proto.Field(
+ proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp,
+ )
+ end_time = proto.Field(
+ proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp,
+ )
+ parent = proto.Field(proto.STRING, number=1,)
requested_time_range = proto.Field(
proto.MESSAGE, number=3, oneof="time", message=TimeRange,
)
-
requested_run_time = proto.Field(
- proto.MESSAGE, number=4, oneof="time", message=timestamp.Timestamp,
+ proto.MESSAGE, number=4, oneof="time", message=timestamp_pb2.Timestamp,
)
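Since requested_time_range and requested_run_time share the "time" oneof, setting one clears the other. An illustrative request (the resource name is hypothetical):

    from google.cloud.bigquery_datatransfer_v1.types import datatransfer
    from google.protobuf import timestamp_pb2

    request = datatransfer.StartManualTransferRunsRequest(
        parent="projects/my-project/locations/us/transferConfigs/my-config",
        requested_run_time=timestamp_pb2.Timestamp(seconds=751),
    )
    # Assigning request.requested_time_range afterwards would clear
    # requested_run_time, because both live in the "time" oneof.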
class StartManualTransferRunsResponse(proto.Message):
r"""A response to start manual transfer runs.
-
Attributes:
runs (Sequence[google.cloud.bigquery_datatransfer_v1.types.TransferRun]):
The transfer runs that were created.
diff --git a/google/cloud/bigquery_datatransfer_v1/types/transfer.py b/google/cloud/bigquery_datatransfer_v1/types/transfer.py
index 63274fab..5e04fc7d 100644
--- a/google/cloud/bigquery_datatransfer_v1/types/transfer.py
+++ b/google/cloud/bigquery_datatransfer_v1/types/transfer.py
@@ -1,5 +1,4 @@
# -*- coding: utf-8 -*-
-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,13 +13,11 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-
import proto # type: ignore
-
-from google.protobuf import struct_pb2 as struct # type: ignore
-from google.protobuf import timestamp_pb2 as timestamp # type: ignore
-from google.rpc import status_pb2 as status # type: ignore
+from google.protobuf import struct_pb2 # type: ignore
+from google.protobuf import timestamp_pb2 # type: ignore
+from google.rpc import status_pb2 # type: ignore
__protobuf__ = proto.module(
@@ -65,12 +62,11 @@ class EmailPreferences(proto.Message):
transfer run failures.
"""
- enable_failure_email = proto.Field(proto.BOOL, number=1)
+ enable_failure_email = proto.Field(proto.BOOL, number=1,)
class ScheduleOptions(proto.Message):
r"""Options customizing the data transfer schedule.
-
Attributes:
disable_auto_scheduling (bool):
If true, automatic scheduling of data
@@ -96,11 +92,9 @@ class ScheduleOptions(proto.Message):
option.
"""
- disable_auto_scheduling = proto.Field(proto.BOOL, number=3)
-
- start_time = proto.Field(proto.MESSAGE, number=1, message=timestamp.Timestamp,)
-
- end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,)
+ disable_auto_scheduling = proto.Field(proto.BOOL, number=3,)
+ start_time = proto.Field(proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp,)
+ end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp,)
class TransferConfig(proto.Message):
@@ -181,36 +175,23 @@ class TransferConfig(proto.Message):
user who owns this transfer config.
"""
- name = proto.Field(proto.STRING, number=1)
-
- destination_dataset_id = proto.Field(proto.STRING, number=2, oneof="destination")
-
- display_name = proto.Field(proto.STRING, number=3)
-
- data_source_id = proto.Field(proto.STRING, number=5)
-
- params = proto.Field(proto.MESSAGE, number=9, message=struct.Struct,)
-
- schedule = proto.Field(proto.STRING, number=7)
-
+ name = proto.Field(proto.STRING, number=1,)
+ destination_dataset_id = proto.Field(proto.STRING, number=2, oneof="destination",)
+ display_name = proto.Field(proto.STRING, number=3,)
+ data_source_id = proto.Field(proto.STRING, number=5,)
+ params = proto.Field(proto.MESSAGE, number=9, message=struct_pb2.Struct,)
+ schedule = proto.Field(proto.STRING, number=7,)
schedule_options = proto.Field(proto.MESSAGE, number=24, message="ScheduleOptions",)
-
- data_refresh_window_days = proto.Field(proto.INT32, number=12)
-
- disabled = proto.Field(proto.BOOL, number=13)
-
- update_time = proto.Field(proto.MESSAGE, number=4, message=timestamp.Timestamp,)
-
- next_run_time = proto.Field(proto.MESSAGE, number=8, message=timestamp.Timestamp,)
-
+ data_refresh_window_days = proto.Field(proto.INT32, number=12,)
+ disabled = proto.Field(proto.BOOL, number=13,)
+ update_time = proto.Field(proto.MESSAGE, number=4, message=timestamp_pb2.Timestamp,)
+ next_run_time = proto.Field(
+ proto.MESSAGE, number=8, message=timestamp_pb2.Timestamp,
+ )
state = proto.Field(proto.ENUM, number=10, enum="TransferState",)
-
- user_id = proto.Field(proto.INT64, number=11)
-
- dataset_region = proto.Field(proto.STRING, number=14)
-
- notification_pubsub_topic = proto.Field(proto.STRING, number=15)
-
+ user_id = proto.Field(proto.INT64, number=11,)
+ dataset_region = proto.Field(proto.STRING, number=14,)
+ notification_pubsub_topic = proto.Field(proto.STRING, number=15,)
email_preferences = proto.Field(
proto.MESSAGE, number=18, message="EmailPreferences",
)
@@ -218,7 +199,6 @@ class TransferConfig(proto.Message):
class TransferRun(proto.Message):
r"""Represents a data transfer run.
-
Attributes:
name (str):
The resource name of the transfer run. Transfer run names
@@ -274,34 +254,22 @@ class TransferRun(proto.Message):
this run was derived from.
"""
- name = proto.Field(proto.STRING, number=1)
-
- schedule_time = proto.Field(proto.MESSAGE, number=3, message=timestamp.Timestamp,)
-
- run_time = proto.Field(proto.MESSAGE, number=10, message=timestamp.Timestamp,)
-
- error_status = proto.Field(proto.MESSAGE, number=21, message=status.Status,)
-
- start_time = proto.Field(proto.MESSAGE, number=4, message=timestamp.Timestamp,)
-
- end_time = proto.Field(proto.MESSAGE, number=5, message=timestamp.Timestamp,)
-
- update_time = proto.Field(proto.MESSAGE, number=6, message=timestamp.Timestamp,)
-
- params = proto.Field(proto.MESSAGE, number=9, message=struct.Struct,)
-
- destination_dataset_id = proto.Field(proto.STRING, number=2, oneof="destination")
-
- data_source_id = proto.Field(proto.STRING, number=7)
-
+ name = proto.Field(proto.STRING, number=1,)
+ schedule_time = proto.Field(
+ proto.MESSAGE, number=3, message=timestamp_pb2.Timestamp,
+ )
+ run_time = proto.Field(proto.MESSAGE, number=10, message=timestamp_pb2.Timestamp,)
+ error_status = proto.Field(proto.MESSAGE, number=21, message=status_pb2.Status,)
+ start_time = proto.Field(proto.MESSAGE, number=4, message=timestamp_pb2.Timestamp,)
+ end_time = proto.Field(proto.MESSAGE, number=5, message=timestamp_pb2.Timestamp,)
+ update_time = proto.Field(proto.MESSAGE, number=6, message=timestamp_pb2.Timestamp,)
+ params = proto.Field(proto.MESSAGE, number=9, message=struct_pb2.Struct,)
+ destination_dataset_id = proto.Field(proto.STRING, number=2, oneof="destination",)
+ data_source_id = proto.Field(proto.STRING, number=7,)
state = proto.Field(proto.ENUM, number=8, enum="TransferState",)
-
- user_id = proto.Field(proto.INT64, number=11)
-
- schedule = proto.Field(proto.STRING, number=12)
-
- notification_pubsub_topic = proto.Field(proto.STRING, number=23)
-
+ user_id = proto.Field(proto.INT64, number=11,)
+ schedule = proto.Field(proto.STRING, number=12,)
+ notification_pubsub_topic = proto.Field(proto.STRING, number=23,)
email_preferences = proto.Field(
proto.MESSAGE, number=25, message="EmailPreferences",
)
@@ -327,11 +295,11 @@ class MessageSeverity(proto.Enum):
WARNING = 2
ERROR = 3
- message_time = proto.Field(proto.MESSAGE, number=1, message=timestamp.Timestamp,)
-
+ message_time = proto.Field(
+ proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp,
+ )
severity = proto.Field(proto.ENUM, number=2, enum=MessageSeverity,)
-
- message_text = proto.Field(proto.STRING, number=3)
+ message_text = proto.Field(proto.STRING, number=3,)
__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/noxfile.py b/noxfile.py
index af50a606..94ee6a8f 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -62,16 +62,9 @@ def lint(session):
session.run("flake8", "google", "tests")
-@nox.session(python="3.6")
+@nox.session(python=DEFAULT_PYTHON_VERSION)
def blacken(session):
- """Run black.
-
- Format code to uniform standard.
-
- This currently uses Python 3.6 due to the automated Kokoro run of synthtool.
- That run uses an image that doesn't have 3.6 installed. Before updating this
- check the state of the `gcp_ubuntu_config` we use for that Kokoro run.
- """
+ """Run black. Format code to uniform standard."""
session.install(BLACK_VERSION)
session.run(
"black", *BLACK_PATHS,
@@ -131,9 +124,6 @@ def system(session):
# Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true.
if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false":
session.skip("RUN_SYSTEM_TESTS is set to false, skipping")
- # Sanity check: Only run tests if the environment variable is set.
- if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""):
- session.skip("Credentials must be set via environment variable")
# Install pyopenssl for mTLS testing.
if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true":
session.install("pyopenssl")
@@ -189,7 +179,7 @@ def docs(session):
"""Build the docs for this library."""
session.install("-e", ".")
- session.install("sphinx", "alabaster", "recommonmark")
+ session.install("sphinx==4.0.1", "alabaster", "recommonmark")
shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True)
session.run(
@@ -211,7 +201,9 @@ def docfx(session):
"""Build the docfx yaml files for this library."""
session.install("-e", ".")
- session.install("sphinx", "alabaster", "recommonmark", "gcp-sphinx-docfx-yaml")
+ session.install(
+ "sphinx==4.0.1", "alabaster", "recommonmark", "gcp-sphinx-docfx-yaml"
+ )
shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True)
session.run(
diff --git a/owlbot.py b/owlbot.py
index 87e7c517..2281a7a8 100644
--- a/owlbot.py
+++ b/owlbot.py
@@ -26,17 +26,18 @@
# Generate bigquery_datatransfer GAPIC layer
# ----------------------------------------------------------------------------
for library in s.get_staging_dirs("v1"):
- # Fix missing async client in datatransfer_v1
- # https://github.com/googleapis/gapic-generator-python/issues/815
+
+ # Comment out broken assertion in unit test
+ # https://github.com/googleapis/gapic-generator-python/issues/897
s.replace(
- library / "google/cloud/bigquery_datatransfer_v1/__init__.py",
- r"from \.services\.data_transfer_service import DataTransferServiceClient",
- "\\g<0>\nfrom .services.data_transfer_service import DataTransferServiceAsyncClient",
+ library / "tests/**/*.py",
+ "assert args\[0\]\.start_time == timestamp_pb2\.Timestamp\(seconds=751\)",
+ "# assert args[0].start_time == timestamp_pb2.Timestamp(seconds=751)"
)
s.replace(
- library / "google/cloud/bigquery_datatransfer_v1/__init__.py",
- r"'DataTransferServiceClient',",
- '\\g<0>\n "DataTransferServiceAsyncClient"',
+ library / "tests/**/*.py",
+ "assert args\[0\]\.end_time == timestamp_pb2\.Timestamp\(seconds=751\)",
+ "# assert args[0].end_time == timestamp_pb2.Timestamp(seconds=751)"
)
s.move(library, excludes=["*.tar.gz", "docs/index.rst", "README.rst", "setup.py"])
diff --git a/renovate.json b/renovate.json
index f08bc22c..c0489556 100644
--- a/renovate.json
+++ b/renovate.json
@@ -2,5 +2,8 @@
"extends": [
"config:base", ":preserveSemverRanges"
],
- "ignorePaths": [".pre-commit-config.yaml"]
+ "ignorePaths": [".pre-commit-config.yaml"],
+ "pip_requirements": {
+ "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"]
+ }
}
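Renovate treats fileMatch entries as regular expressions. A quick sanity check of the samples pattern, outside the config (illustrative only):

    import re

    # JSON escaping turns "samples/[\\S/]*constraints.txt" into this regex.
    pattern = re.compile(r"samples/[\S/]*constraints.txt")
    assert pattern.search("samples/snippets/constraints.txt")
    assert not pattern.search("testing/constraints-3.6.txt")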
diff --git a/samples/snippets/noxfile.py b/samples/snippets/noxfile.py
index 97bf7da8..5ff9e1db 100644
--- a/samples/snippets/noxfile.py
+++ b/samples/snippets/noxfile.py
@@ -50,7 +50,10 @@
# to use your own Cloud project.
'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT',
# 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT',
-
+ # If you need to use a specific version of pip,
+ # change pip_version_override to the string representation
+ # of the version number, for example, "20.2.4"
+ "pip_version_override": None,
# A dictionary you want to inject into your test. Don't put any
# secrets here. These values will override predefined values.
'envs': {},
@@ -170,12 +173,21 @@ def blacken(session: nox.sessions.Session) -> None:
def _session_tests(session: nox.sessions.Session, post_install: Callable = None) -> None:
    """Runs py.test for a particular project."""
+    if TEST_CONFIG["pip_version_override"]:
+        pip_version = TEST_CONFIG["pip_version_override"]
+        session.install(f"pip=={pip_version}")
if os.path.exists("requirements.txt"):
- session.install("-r", "requirements.txt")
+ if os.path.exists("constraints.txt"):
+ session.install("-r", "requirements.txt", "-c", "constraints.txt")
+ else:
+ session.install("-r", "requirements.txt")
if os.path.exists("requirements-test.txt"):
- session.install("-r", "requirements-test.txt")
+ if os.path.exists("constraints-test.txt"):
+ session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt")
+ else:
+ session.install("-r", "requirements-test.txt")
if INSTALL_LIBRARY_FROM_SOURCE:
session.install("-e", _get_repo_root())
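pip's -c flag applies a constraints file: it pins versions for anything that does get installed, without adding requirements of its own. Roughly what the constraints branch invokes inside the session virtualenv (illustrative; nox manages the interpreter and venv):

    import subprocess

    subprocess.run(
        ["python", "-m", "pip", "install",
         "-r", "requirements.txt", "-c", "constraints.txt"],
        check=True,
    )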
diff --git a/samples/snippets/requirements-test.txt b/samples/snippets/requirements-test.txt
index fff09f56..e4d022b0 100644
--- a/samples/snippets/requirements-test.txt
+++ b/samples/snippets/requirements-test.txt
@@ -1,3 +1,3 @@
-google-cloud-bigquery==2.6.0
-pytest==6.0.1
-mock==4.0.2
+google-cloud-bigquery==2.20.0
+pytest==6.2.4
+mock==4.0.3
diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt
index e7fe546c..27259498 100644
--- a/samples/snippets/requirements.txt
+++ b/samples/snippets/requirements.txt
@@ -1 +1 @@
-google-cloud-bigquery-datatransfer==3.1.0
+google-cloud-bigquery-datatransfer==3.1.1
diff --git a/scripts/fixup_bigquery_datatransfer_v1_keywords.py b/scripts/fixup_bigquery_datatransfer_v1_keywords.py
index c32d8022..f7746ab5 100644
--- a/scripts/fixup_bigquery_datatransfer_v1_keywords.py
+++ b/scripts/fixup_bigquery_datatransfer_v1_keywords.py
@@ -1,6 +1,5 @@
#! /usr/bin/env python3
# -*- coding: utf-8 -*-
-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -15,7 +14,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-
import argparse
import os
import libcst as cst
@@ -41,21 +39,20 @@ def partition(
class bigquery_datatransferCallTransformer(cst.CSTTransformer):
CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata')
METHOD_TO_PARAMS: Dict[str, Tuple[str]] = {
- 'check_valid_creds': ('name', ),
- 'create_transfer_config': ('parent', 'transfer_config', 'authorization_code', 'version_info', 'service_account_name', ),
- 'delete_transfer_config': ('name', ),
- 'delete_transfer_run': ('name', ),
- 'get_data_source': ('name', ),
- 'get_transfer_config': ('name', ),
- 'get_transfer_run': ('name', ),
- 'list_data_sources': ('parent', 'page_token', 'page_size', ),
- 'list_transfer_configs': ('parent', 'data_source_ids', 'page_token', 'page_size', ),
- 'list_transfer_logs': ('parent', 'page_token', 'page_size', 'message_types', ),
- 'list_transfer_runs': ('parent', 'states', 'page_token', 'page_size', 'run_attempt', ),
- 'schedule_transfer_runs': ('parent', 'start_time', 'end_time', ),
- 'start_manual_transfer_runs': ('parent', 'requested_time_range', 'requested_run_time', ),
- 'update_transfer_config': ('transfer_config', 'update_mask', 'authorization_code', 'version_info', 'service_account_name', ),
-
+ 'check_valid_creds': ('name', ),
+ 'create_transfer_config': ('parent', 'transfer_config', 'authorization_code', 'version_info', 'service_account_name', ),
+ 'delete_transfer_config': ('name', ),
+ 'delete_transfer_run': ('name', ),
+ 'get_data_source': ('name', ),
+ 'get_transfer_config': ('name', ),
+ 'get_transfer_run': ('name', ),
+ 'list_data_sources': ('parent', 'page_token', 'page_size', ),
+ 'list_transfer_configs': ('parent', 'data_source_ids', 'page_token', 'page_size', ),
+ 'list_transfer_logs': ('parent', 'page_token', 'page_size', 'message_types', ),
+ 'list_transfer_runs': ('parent', 'states', 'page_token', 'page_size', 'run_attempt', ),
+ 'schedule_transfer_runs': ('parent', 'start_time', 'end_time', ),
+ 'start_manual_transfer_runs': ('parent', 'requested_time_range', 'requested_run_time', ),
+ 'update_transfer_config': ('transfer_config', 'update_mask', 'authorization_code', 'version_info', 'service_account_name', ),
}
def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode:
@@ -86,7 +83,7 @@ def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode:
value=cst.Dict([
cst.DictElement(
cst.SimpleString("'{}'".format(name)),
- cst.Element(value=arg.value)
+cst.Element(value=arg.value)
)
# Note: the args + kwargs looks silly, but keep in mind that
# the control parameters had to be stripped out, and that
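The METHOD_TO_PARAMS table drives a libcst transform that rewrites old positional-argument call sites into the request-object form. A simplified model of the mapping it applies (the sample method and value are hypothetical):

    METHOD_TO_PARAMS = {"check_valid_creds": ("name",)}

    def to_request_kwargs(method, *args):
        # Pair positional args with the canonical parameter order, as the
        # CST transform does before emitting request={...}.
        return {"request": dict(zip(METHOD_TO_PARAMS[method], args))}

    # to_request_kwargs("check_valid_creds", "projects/p/dataSources/ds")
    # -> {"request": {"name": "projects/p/dataSources/ds"}}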
diff --git a/setup.py b/setup.py
index 63033cb8..043dd179 100644
--- a/setup.py
+++ b/setup.py
@@ -22,7 +22,7 @@
name = "google-cloud-bigquery-datatransfer"
description = "BigQuery Data Transfer API client library"
-version = "3.1.1"
+version = "3.2.0"
# Should be one of:
# 'Development Status :: 3 - Alpha'
# 'Development Status :: 4 - Beta'
@@ -31,6 +31,7 @@
dependencies = (
"google-api-core[grpc] >= 1.22.2, < 2.0.0dev",
"proto-plus >= 1.15.0",
+ "packaging >= 14.3",
)
extras = {"libcst": "libcst >= 0.2.5"}
diff --git a/testing/constraints-3.6.txt b/testing/constraints-3.6.txt
index 28be0fe2..e6739c3e 100644
--- a/testing/constraints-3.6.txt
+++ b/testing/constraints-3.6.txt
@@ -8,3 +8,5 @@
google-api-core==1.22.2
proto-plus==1.15.0
libcst==0.2.5
+packaging==14.3
+google-auth==1.24.0 # TODO: remove when google-auth>=1.25.0 is transitively required through google-api-core
diff --git a/tests/__init__.py b/tests/__init__.py
new file mode 100644
index 00000000..4de65971
--- /dev/null
+++ b/tests/__init__.py
@@ -0,0 +1,15 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py
new file mode 100644
index 00000000..4de65971
--- /dev/null
+++ b/tests/unit/__init__.py
@@ -0,0 +1,15 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
diff --git a/tests/unit/gapic/__init__.py b/tests/unit/gapic/__init__.py
new file mode 100644
index 00000000..4de65971
--- /dev/null
+++ b/tests/unit/gapic/__init__.py
@@ -0,0 +1,15 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
diff --git a/tests/unit/gapic/bigquery_datatransfer_v1/__init__.py b/tests/unit/gapic/bigquery_datatransfer_v1/__init__.py
index 42ffdf2b..4de65971 100644
--- a/tests/unit/gapic/bigquery_datatransfer_v1/__init__.py
+++ b/tests/unit/gapic/bigquery_datatransfer_v1/__init__.py
@@ -1,5 +1,4 @@
# -*- coding: utf-8 -*-
-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py b/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py
index 345a6b56..cf70ccc9 100644
--- a/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py
+++ b/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py
@@ -1,5 +1,4 @@
# -*- coding: utf-8 -*-
-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,9 +13,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-
import os
import mock
+import packaging.version
import grpc
from grpc.experimental import aio
@@ -24,13 +23,13 @@
import pytest
from proto.marshal.rules.dates import DurationRule, TimestampRule
-from google import auth
+
from google.api_core import client_options
-from google.api_core import exceptions
+from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import grpc_helpers
from google.api_core import grpc_helpers_async
-from google.auth import credentials
+from google.auth import credentials as ga_credentials
from google.auth.exceptions import MutualTLSChannelError
from google.cloud.bigquery_datatransfer_v1.services.data_transfer_service import (
DataTransferServiceAsyncClient,
@@ -42,14 +41,44 @@
from google.cloud.bigquery_datatransfer_v1.services.data_transfer_service import (
transports,
)
+from google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.transports.base import (
+ _API_CORE_VERSION,
+)
+from google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.transports.base import (
+ _GOOGLE_AUTH_VERSION,
+)
from google.cloud.bigquery_datatransfer_v1.types import datatransfer
from google.cloud.bigquery_datatransfer_v1.types import transfer
from google.oauth2 import service_account
-from google.protobuf import duration_pb2 as duration # type: ignore
-from google.protobuf import field_mask_pb2 as field_mask # type: ignore
-from google.protobuf import struct_pb2 as struct # type: ignore
-from google.protobuf import timestamp_pb2 as timestamp # type: ignore
-from google.rpc import status_pb2 as status # type: ignore
+from google.protobuf import duration_pb2 # type: ignore
+from google.protobuf import field_mask_pb2 # type: ignore
+from google.protobuf import struct_pb2 # type: ignore
+from google.protobuf import timestamp_pb2 # type: ignore
+from google.rpc import status_pb2 # type: ignore
+import google.auth
+
+
+# TODO(busunkim): Once google-api-core >= 1.26.0 is required:
+# - Delete all the api-core and auth "less than" test cases
+# - Delete these pytest markers (Make the "greater than or equal to" tests the default).
+requires_google_auth_lt_1_25_0 = pytest.mark.skipif(
+ packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"),
+ reason="This test requires google-auth < 1.25.0",
+)
+requires_google_auth_gte_1_25_0 = pytest.mark.skipif(
+ packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"),
+ reason="This test requires google-auth >= 1.25.0",
+)
+
+requires_api_core_lt_1_26_0 = pytest.mark.skipif(
+ packaging.version.parse(_API_CORE_VERSION) >= packaging.version.parse("1.26.0"),
+ reason="This test requires google-api-core < 1.26.0",
+)
+
+requires_api_core_gte_1_26_0 = pytest.mark.skipif(
+ packaging.version.parse(_API_CORE_VERSION) < packaging.version.parse("1.26.0"),
+ reason="This test requires google-api-core >= 1.26.0",
+)
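Each marker gates a test on the installed dependency version; applying one is a plain decorator (the test body here is illustrative):

    @requires_api_core_gte_1_26_0
    def test_create_channel_forwards_default_scopes():
        # Runs only when google-api-core >= 1.26.0 is installed; otherwise
        # pytest reports the test as skipped with the reason given above.
        ...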
def client_cert_source_callback():
@@ -101,7 +130,7 @@ def test__get_default_mtls_endpoint():
"client_class", [DataTransferServiceClient, DataTransferServiceAsyncClient,]
)
def test_data_transfer_service_client_from_service_account_info(client_class):
- creds = credentials.AnonymousCredentials()
+ creds = ga_credentials.AnonymousCredentials()
with mock.patch.object(
service_account.Credentials, "from_service_account_info"
) as factory:
@@ -118,7 +147,7 @@ def test_data_transfer_service_client_from_service_account_info(client_class):
"client_class", [DataTransferServiceClient, DataTransferServiceAsyncClient,]
)
def test_data_transfer_service_client_from_service_account_file(client_class):
- creds = credentials.AnonymousCredentials()
+ creds = ga_credentials.AnonymousCredentials()
with mock.patch.object(
service_account.Credentials, "from_service_account_file"
) as factory:
@@ -175,7 +204,7 @@ def test_data_transfer_service_client_client_options(
):
# Check that if channel is provided we won't create a new one.
with mock.patch.object(DataTransferServiceClient, "get_transport_class") as gtc:
- transport = transport_class(credentials=credentials.AnonymousCredentials())
+ transport = transport_class(credentials=ga_credentials.AnonymousCredentials())
client = client_class(transport=transport)
gtc.assert_not_called()
@@ -481,7 +510,7 @@ def test_get_data_source(
transport: str = "grpc", request_type=datatransfer.GetDataSourceRequest
):
client = DataTransferServiceClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -509,55 +538,36 @@ def test_get_data_source(
default_data_refresh_window_days=3379,
manual_runs_disabled=True,
)
-
response = client.get_data_source(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
-
assert args[0] == datatransfer.GetDataSourceRequest()
# Establish that the response is the type that we expect.
-
assert isinstance(response, datatransfer.DataSource)
-
assert response.name == "name_value"
-
assert response.data_source_id == "data_source_id_value"
-
assert response.display_name == "display_name_value"
-
assert response.description == "description_value"
-
assert response.client_id == "client_id_value"
-
assert response.scopes == ["scopes_value"]
-
assert response.transfer_type == transfer.TransferType.BATCH
-
assert response.supports_multiple_transfers is True
-
assert response.update_deadline_seconds == 2406
-
assert response.default_schedule == "default_schedule_value"
-
assert response.supports_custom_schedule is True
-
assert response.help_url == "help_url_value"
-
assert (
response.authorization_type
== datatransfer.DataSource.AuthorizationType.AUTHORIZATION_CODE
)
-
assert (
response.data_refresh_type
== datatransfer.DataSource.DataRefreshType.SLIDING_WINDOW
)
-
assert response.default_data_refresh_window_days == 3379
-
assert response.manual_runs_disabled is True
@@ -569,7 +579,7 @@ def test_get_data_source_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = DataTransferServiceClient(
- credentials=credentials.AnonymousCredentials(), transport="grpc",
+ credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -577,7 +587,6 @@ def test_get_data_source_empty_call():
client.get_data_source()
call.assert_called()
_, args, _ = call.mock_calls[0]
-
assert args[0] == datatransfer.GetDataSourceRequest()
@@ -586,7 +595,7 @@ async def test_get_data_source_async(
transport: str = "grpc_asyncio", request_type=datatransfer.GetDataSourceRequest
):
client = DataTransferServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -616,54 +625,36 @@ async def test_get_data_source_async(
manual_runs_disabled=True,
)
)
-
response = await client.get_data_source(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
-
assert args[0] == datatransfer.GetDataSourceRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, datatransfer.DataSource)
-
assert response.name == "name_value"
-
assert response.data_source_id == "data_source_id_value"
-
assert response.display_name == "display_name_value"
-
assert response.description == "description_value"
-
assert response.client_id == "client_id_value"
-
assert response.scopes == ["scopes_value"]
-
assert response.transfer_type == transfer.TransferType.BATCH
-
assert response.supports_multiple_transfers is True
-
assert response.update_deadline_seconds == 2406
-
assert response.default_schedule == "default_schedule_value"
-
assert response.supports_custom_schedule is True
-
assert response.help_url == "help_url_value"
-
assert (
response.authorization_type
== datatransfer.DataSource.AuthorizationType.AUTHORIZATION_CODE
)
-
assert (
response.data_refresh_type
== datatransfer.DataSource.DataRefreshType.SLIDING_WINDOW
)
-
assert response.default_data_refresh_window_days == 3379
-
assert response.manual_runs_disabled is True
@@ -673,17 +664,19 @@ async def test_get_data_source_async_from_dict():
def test_get_data_source_field_headers():
- client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = DataTransferServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = datatransfer.GetDataSourceRequest()
+
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_data_source), "__call__") as call:
call.return_value = datatransfer.DataSource()
-
client.get_data_source(request)
# Establish that the underlying gRPC stub method was called.
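
Outside the shown hunk, these field-header tests finish by asserting the routing metadata. A hedged sketch of that closing check, reusing `call` from the test above and the `name/value` routing pair set in the hunk:

    # The routing field set on the request is expected to surface as
    # x-goog-request-params metadata on the recorded stub call.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value") in kw["metadata"]
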
@@ -699,12 +692,13 @@ def test_get_data_source_field_headers():
@pytest.mark.asyncio
async def test_get_data_source_field_headers_async():
client = DataTransferServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = datatransfer.GetDataSourceRequest()
+
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
@@ -712,7 +706,6 @@ async def test_get_data_source_field_headers_async():
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
datatransfer.DataSource()
)
-
await client.get_data_source(request)
# Establish that the underlying gRPC stub method was called.
@@ -726,13 +719,14 @@ async def test_get_data_source_field_headers_async():
def test_get_data_source_flattened():
- client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = DataTransferServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_data_source), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = datatransfer.DataSource()
-
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.get_data_source(name="name_value",)
@@ -741,12 +735,13 @@ def test_get_data_source_flattened():
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
-
assert args[0].name == "name_value"
def test_get_data_source_flattened_error():
- client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = DataTransferServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
# Attempting to call a method with both a request object and flattened
# fields is an error.
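
The bodies of these error tests fall outside the hunk; a sketch of the standard shape, reusing `client`, `pytest`, and `datatransfer` from the surrounding test module: passing both a request object and flattened keyword fields is rejected before any RPC is attempted.

    with pytest.raises(ValueError):
        # Request object and flattened field together: invalid.
        client.get_data_source(
            datatransfer.GetDataSourceRequest(), name="name_value",
        )
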
@@ -759,7 +754,7 @@ def test_get_data_source_flattened_error():
@pytest.mark.asyncio
async def test_get_data_source_flattened_async():
client = DataTransferServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -778,14 +773,13 @@ async def test_get_data_source_flattened_async():
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
-
assert args[0].name == "name_value"
@pytest.mark.asyncio
async def test_get_data_source_flattened_error_async():
client = DataTransferServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
@@ -800,7 +794,7 @@ def test_list_data_sources(
transport: str = "grpc", request_type=datatransfer.ListDataSourcesRequest
):
client = DataTransferServiceClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -815,19 +809,15 @@ def test_list_data_sources(
call.return_value = datatransfer.ListDataSourcesResponse(
next_page_token="next_page_token_value",
)
-
response = client.list_data_sources(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
-
assert args[0] == datatransfer.ListDataSourcesRequest()
# Establish that the response is the type that we expect.
-
assert isinstance(response, pagers.ListDataSourcesPager)
-
assert response.next_page_token == "next_page_token_value"
@@ -839,7 +829,7 @@ def test_list_data_sources_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = DataTransferServiceClient(
- credentials=credentials.AnonymousCredentials(), transport="grpc",
+ credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -849,7 +839,6 @@ def test_list_data_sources_empty_call():
client.list_data_sources()
call.assert_called()
_, args, _ = call.mock_calls[0]
-
assert args[0] == datatransfer.ListDataSourcesRequest()
@@ -858,7 +847,7 @@ async def test_list_data_sources_async(
transport: str = "grpc_asyncio", request_type=datatransfer.ListDataSourcesRequest
):
client = DataTransferServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -875,18 +864,15 @@ async def test_list_data_sources_async(
next_page_token="next_page_token_value",
)
)
-
response = await client.list_data_sources(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
-
assert args[0] == datatransfer.ListDataSourcesRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListDataSourcesAsyncPager)
-
assert response.next_page_token == "next_page_token_value"
@@ -896,11 +882,14 @@ async def test_list_data_sources_async_from_dict():
def test_list_data_sources_field_headers():
- client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = DataTransferServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = datatransfer.ListDataSourcesRequest()
+
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
@@ -908,7 +897,6 @@ def test_list_data_sources_field_headers():
type(client.transport.list_data_sources), "__call__"
) as call:
call.return_value = datatransfer.ListDataSourcesResponse()
-
client.list_data_sources(request)
# Establish that the underlying gRPC stub method was called.
@@ -924,12 +912,13 @@ def test_list_data_sources_field_headers():
@pytest.mark.asyncio
async def test_list_data_sources_field_headers_async():
client = DataTransferServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = datatransfer.ListDataSourcesRequest()
+
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
@@ -939,7 +928,6 @@ async def test_list_data_sources_field_headers_async():
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
datatransfer.ListDataSourcesResponse()
)
-
await client.list_data_sources(request)
# Establish that the underlying gRPC stub method was called.
@@ -953,7 +941,9 @@ async def test_list_data_sources_field_headers_async():
def test_list_data_sources_flattened():
- client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = DataTransferServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
@@ -961,7 +951,6 @@ def test_list_data_sources_flattened():
) as call:
# Designate an appropriate return value for the call.
call.return_value = datatransfer.ListDataSourcesResponse()
-
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.list_data_sources(parent="parent_value",)
@@ -970,12 +959,13 @@ def test_list_data_sources_flattened():
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
-
assert args[0].parent == "parent_value"
def test_list_data_sources_flattened_error():
- client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = DataTransferServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
# Attempting to call a method with both a request object and flattened
# fields is an error.
@@ -988,7 +978,7 @@ def test_list_data_sources_flattened_error():
@pytest.mark.asyncio
async def test_list_data_sources_flattened_async():
client = DataTransferServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -1009,14 +999,13 @@ async def test_list_data_sources_flattened_async():
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
-
assert args[0].parent == "parent_value"
@pytest.mark.asyncio
async def test_list_data_sources_flattened_error_async():
client = DataTransferServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
@@ -1028,7 +1017,7 @@ async def test_list_data_sources_flattened_error_async():
def test_list_data_sources_pager():
- client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials,)
+ client = DataTransferServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
@@ -1070,7 +1059,7 @@ def test_list_data_sources_pager():
def test_list_data_sources_pages():
- client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials,)
+ client = DataTransferServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
@@ -1105,7 +1094,7 @@ def test_list_data_sources_pages():
@pytest.mark.asyncio
async def test_list_data_sources_async_pager():
client = DataTransferServiceAsyncClient(
- credentials=credentials.AnonymousCredentials,
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -1148,7 +1137,7 @@ async def test_list_data_sources_async_pager():
@pytest.mark.asyncio
async def test_list_data_sources_async_pages():
client = DataTransferServiceAsyncClient(
- credentials=credentials.AnonymousCredentials,
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
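
The four pager tests around this point exercise lazy pagination. A sync-client usage sketch under the same anonymous-credentials setup (in the tests the transport call is mocked; here the loop body is illustrative):

    from google.auth import credentials as ga_credentials
    from google.cloud import bigquery_datatransfer_v1

    client = bigquery_datatransfer_v1.DataTransferServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Iterating the pager re-invokes the RPC with each next_page_token
    # and yields individual DataSource messages across pages.
    for data_source in client.list_data_sources(parent="parent_value"):
        print(data_source.display_name)
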
@@ -1189,7 +1178,7 @@ def test_create_transfer_config(
transport: str = "grpc", request_type=datatransfer.CreateTransferConfigRequest
):
client = DataTransferServiceClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -1214,37 +1203,24 @@ def test_create_transfer_config(
notification_pubsub_topic="notification_pubsub_topic_value",
destination_dataset_id="destination_dataset_id_value",
)
-
response = client.create_transfer_config(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
-
assert args[0] == datatransfer.CreateTransferConfigRequest()
# Establish that the response is the type that we expect.
-
assert isinstance(response, transfer.TransferConfig)
-
assert response.name == "name_value"
-
assert response.display_name == "display_name_value"
-
assert response.data_source_id == "data_source_id_value"
-
assert response.schedule == "schedule_value"
-
assert response.data_refresh_window_days == 2543
-
assert response.disabled is True
-
assert response.state == transfer.TransferState.PENDING
-
assert response.user_id == 747
-
assert response.dataset_region == "dataset_region_value"
-
assert response.notification_pubsub_topic == "notification_pubsub_topic_value"
@@ -1256,7 +1232,7 @@ def test_create_transfer_config_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = DataTransferServiceClient(
- credentials=credentials.AnonymousCredentials(), transport="grpc",
+ credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -1266,7 +1242,6 @@ def test_create_transfer_config_empty_call():
client.create_transfer_config()
call.assert_called()
_, args, _ = call.mock_calls[0]
-
assert args[0] == datatransfer.CreateTransferConfigRequest()
@@ -1276,7 +1251,7 @@ async def test_create_transfer_config_async(
request_type=datatransfer.CreateTransferConfigRequest,
):
client = DataTransferServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -1302,36 +1277,24 @@ async def test_create_transfer_config_async(
notification_pubsub_topic="notification_pubsub_topic_value",
)
)
-
response = await client.create_transfer_config(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
-
assert args[0] == datatransfer.CreateTransferConfigRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, transfer.TransferConfig)
-
assert response.name == "name_value"
-
assert response.display_name == "display_name_value"
-
assert response.data_source_id == "data_source_id_value"
-
assert response.schedule == "schedule_value"
-
assert response.data_refresh_window_days == 2543
-
assert response.disabled is True
-
assert response.state == transfer.TransferState.PENDING
-
assert response.user_id == 747
-
assert response.dataset_region == "dataset_region_value"
-
assert response.notification_pubsub_topic == "notification_pubsub_topic_value"
@@ -1341,11 +1304,14 @@ async def test_create_transfer_config_async_from_dict():
def test_create_transfer_config_field_headers():
- client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = DataTransferServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = datatransfer.CreateTransferConfigRequest()
+
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
@@ -1353,7 +1319,6 @@ def test_create_transfer_config_field_headers():
type(client.transport.create_transfer_config), "__call__"
) as call:
call.return_value = transfer.TransferConfig()
-
client.create_transfer_config(request)
# Establish that the underlying gRPC stub method was called.
@@ -1369,12 +1334,13 @@ def test_create_transfer_config_field_headers():
@pytest.mark.asyncio
async def test_create_transfer_config_field_headers_async():
client = DataTransferServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = datatransfer.CreateTransferConfigRequest()
+
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
@@ -1384,7 +1350,6 @@ async def test_create_transfer_config_field_headers_async():
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
transfer.TransferConfig()
)
-
await client.create_transfer_config(request)
# Establish that the underlying gRPC stub method was called.
@@ -1398,7 +1363,9 @@ async def test_create_transfer_config_field_headers_async():
def test_create_transfer_config_flattened():
- client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = DataTransferServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
@@ -1406,7 +1373,6 @@ def test_create_transfer_config_flattened():
) as call:
# Designate an appropriate return value for the call.
call.return_value = transfer.TransferConfig()
-
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.create_transfer_config(
@@ -1418,14 +1384,14 @@ def test_create_transfer_config_flattened():
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
-
assert args[0].parent == "parent_value"
-
assert args[0].transfer_config == transfer.TransferConfig(name="name_value")
def test_create_transfer_config_flattened_error():
- client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = DataTransferServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
# Attempting to call a method with both a request object and flattened
# fields is an error.
@@ -1440,7 +1406,7 @@ def test_create_transfer_config_flattened_error():
@pytest.mark.asyncio
async def test_create_transfer_config_flattened_async():
client = DataTransferServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -1464,16 +1430,14 @@ async def test_create_transfer_config_flattened_async():
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
-
assert args[0].parent == "parent_value"
-
assert args[0].transfer_config == transfer.TransferConfig(name="name_value")
@pytest.mark.asyncio
async def test_create_transfer_config_flattened_error_async():
client = DataTransferServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
@@ -1490,7 +1454,7 @@ def test_update_transfer_config(
transport: str = "grpc", request_type=datatransfer.UpdateTransferConfigRequest
):
client = DataTransferServiceClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -1515,37 +1479,24 @@ def test_update_transfer_config(
notification_pubsub_topic="notification_pubsub_topic_value",
destination_dataset_id="destination_dataset_id_value",
)
-
response = client.update_transfer_config(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
-
assert args[0] == datatransfer.UpdateTransferConfigRequest()
# Establish that the response is the type that we expect.
-
assert isinstance(response, transfer.TransferConfig)
-
assert response.name == "name_value"
-
assert response.display_name == "display_name_value"
-
assert response.data_source_id == "data_source_id_value"
-
assert response.schedule == "schedule_value"
-
assert response.data_refresh_window_days == 2543
-
assert response.disabled is True
-
assert response.state == transfer.TransferState.PENDING
-
assert response.user_id == 747
-
assert response.dataset_region == "dataset_region_value"
-
assert response.notification_pubsub_topic == "notification_pubsub_topic_value"
@@ -1557,7 +1508,7 @@ def test_update_transfer_config_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = DataTransferServiceClient(
- credentials=credentials.AnonymousCredentials(), transport="grpc",
+ credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -1567,7 +1518,6 @@ def test_update_transfer_config_empty_call():
client.update_transfer_config()
call.assert_called()
_, args, _ = call.mock_calls[0]
-
assert args[0] == datatransfer.UpdateTransferConfigRequest()
@@ -1577,7 +1527,7 @@ async def test_update_transfer_config_async(
request_type=datatransfer.UpdateTransferConfigRequest,
):
client = DataTransferServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -1603,36 +1553,24 @@ async def test_update_transfer_config_async(
notification_pubsub_topic="notification_pubsub_topic_value",
)
)
-
response = await client.update_transfer_config(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
-
assert args[0] == datatransfer.UpdateTransferConfigRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, transfer.TransferConfig)
-
assert response.name == "name_value"
-
assert response.display_name == "display_name_value"
-
assert response.data_source_id == "data_source_id_value"
-
assert response.schedule == "schedule_value"
-
assert response.data_refresh_window_days == 2543
-
assert response.disabled is True
-
assert response.state == transfer.TransferState.PENDING
-
assert response.user_id == 747
-
assert response.dataset_region == "dataset_region_value"
-
assert response.notification_pubsub_topic == "notification_pubsub_topic_value"
@@ -1642,11 +1580,14 @@ async def test_update_transfer_config_async_from_dict():
def test_update_transfer_config_field_headers():
- client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = DataTransferServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = datatransfer.UpdateTransferConfigRequest()
+
request.transfer_config.name = "transfer_config.name/value"
# Mock the actual call within the gRPC stub, and fake the request.
@@ -1654,7 +1595,6 @@ def test_update_transfer_config_field_headers():
type(client.transport.update_transfer_config), "__call__"
) as call:
call.return_value = transfer.TransferConfig()
-
client.update_transfer_config(request)
# Establish that the underlying gRPC stub method was called.
@@ -1673,12 +1613,13 @@ def test_update_transfer_config_field_headers():
@pytest.mark.asyncio
async def test_update_transfer_config_field_headers_async():
client = DataTransferServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = datatransfer.UpdateTransferConfigRequest()
+
request.transfer_config.name = "transfer_config.name/value"
# Mock the actual call within the gRPC stub, and fake the request.
@@ -1688,7 +1629,6 @@ async def test_update_transfer_config_field_headers_async():
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
transfer.TransferConfig()
)
-
await client.update_transfer_config(request)
# Establish that the underlying gRPC stub method was called.
@@ -1705,7 +1645,9 @@ async def test_update_transfer_config_field_headers_async():
def test_update_transfer_config_flattened():
- client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = DataTransferServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
@@ -1713,26 +1655,25 @@ def test_update_transfer_config_flattened():
) as call:
# Designate an appropriate return value for the call.
call.return_value = transfer.TransferConfig()
-
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.update_transfer_config(
transfer_config=transfer.TransferConfig(name="name_value"),
- update_mask=field_mask.FieldMask(paths=["paths_value"]),
+ update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
-
assert args[0].transfer_config == transfer.TransferConfig(name="name_value")
-
- assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"])
+ assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"])
def test_update_transfer_config_flattened_error():
- client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = DataTransferServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
# Attempting to call a method with both a request object and flattened
# fields is an error.
@@ -1740,14 +1681,14 @@ def test_update_transfer_config_flattened_error():
client.update_transfer_config(
datatransfer.UpdateTransferConfigRequest(),
transfer_config=transfer.TransferConfig(name="name_value"),
- update_mask=field_mask.FieldMask(paths=["paths_value"]),
+ update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
)
@pytest.mark.asyncio
async def test_update_transfer_config_flattened_async():
client = DataTransferServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -1764,23 +1705,21 @@ async def test_update_transfer_config_flattened_async():
# using the keyword arguments to the method.
response = await client.update_transfer_config(
transfer_config=transfer.TransferConfig(name="name_value"),
- update_mask=field_mask.FieldMask(paths=["paths_value"]),
+ update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
-
assert args[0].transfer_config == transfer.TransferConfig(name="name_value")
-
- assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"])
+ assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"])
@pytest.mark.asyncio
async def test_update_transfer_config_flattened_error_async():
client = DataTransferServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
@@ -1789,7 +1728,7 @@ async def test_update_transfer_config_flattened_error_async():
await client.update_transfer_config(
datatransfer.UpdateTransferConfigRequest(),
transfer_config=transfer.TransferConfig(name="name_value"),
- update_mask=field_mask.FieldMask(paths=["paths_value"]),
+ update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
)
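
Several hunks in this stretch swap `field_mask.FieldMask` for `field_mask_pb2.FieldMask`, i.e. the well-known type is now imported from its generated *_pb2 module. A small sketch of the object under its new name (the `paths_value` literal comes from the diff):

    from google.protobuf import field_mask_pb2

    # FieldMask.paths names the TransferConfig fields that an
    # update_transfer_config call is allowed to touch.
    mask = field_mask_pb2.FieldMask(paths=["paths_value"])
    assert list(mask.paths) == ["paths_value"]
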
@@ -1797,7 +1736,7 @@ def test_delete_transfer_config(
transport: str = "grpc", request_type=datatransfer.DeleteTransferConfigRequest
):
client = DataTransferServiceClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -1810,13 +1749,11 @@ def test_delete_transfer_config(
) as call:
# Designate an appropriate return value for the call.
call.return_value = None
-
response = client.delete_transfer_config(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
-
assert args[0] == datatransfer.DeleteTransferConfigRequest()
# Establish that the response is the type that we expect.
@@ -1831,7 +1768,7 @@ def test_delete_transfer_config_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = DataTransferServiceClient(
- credentials=credentials.AnonymousCredentials(), transport="grpc",
+ credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -1841,7 +1778,6 @@ def test_delete_transfer_config_empty_call():
client.delete_transfer_config()
call.assert_called()
_, args, _ = call.mock_calls[0]
-
assert args[0] == datatransfer.DeleteTransferConfigRequest()
@@ -1851,7 +1787,7 @@ async def test_delete_transfer_config_async(
request_type=datatransfer.DeleteTransferConfigRequest,
):
client = DataTransferServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -1864,13 +1800,11 @@ async def test_delete_transfer_config_async(
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-
response = await client.delete_transfer_config(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
-
assert args[0] == datatransfer.DeleteTransferConfigRequest()
# Establish that the response is the type that we expect.
@@ -1883,11 +1817,14 @@ async def test_delete_transfer_config_async_from_dict():
def test_delete_transfer_config_field_headers():
- client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = DataTransferServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = datatransfer.DeleteTransferConfigRequest()
+
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
@@ -1895,7 +1832,6 @@ def test_delete_transfer_config_field_headers():
type(client.transport.delete_transfer_config), "__call__"
) as call:
call.return_value = None
-
client.delete_transfer_config(request)
# Establish that the underlying gRPC stub method was called.
@@ -1911,12 +1847,13 @@ def test_delete_transfer_config_field_headers():
@pytest.mark.asyncio
async def test_delete_transfer_config_field_headers_async():
client = DataTransferServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = datatransfer.DeleteTransferConfigRequest()
+
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
@@ -1924,7 +1861,6 @@ async def test_delete_transfer_config_field_headers_async():
type(client.transport.delete_transfer_config), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-
await client.delete_transfer_config(request)
# Establish that the underlying gRPC stub method was called.
@@ -1938,7 +1874,9 @@ async def test_delete_transfer_config_field_headers_async():
def test_delete_transfer_config_flattened():
- client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = DataTransferServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
@@ -1946,7 +1884,6 @@ def test_delete_transfer_config_flattened():
) as call:
# Designate an appropriate return value for the call.
call.return_value = None
-
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.delete_transfer_config(name="name_value",)
@@ -1955,12 +1892,13 @@ def test_delete_transfer_config_flattened():
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
-
assert args[0].name == "name_value"
def test_delete_transfer_config_flattened_error():
- client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = DataTransferServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
# Attempting to call a method with both a request object and flattened
# fields is an error.
@@ -1973,7 +1911,7 @@ def test_delete_transfer_config_flattened_error():
@pytest.mark.asyncio
async def test_delete_transfer_config_flattened_async():
client = DataTransferServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -1992,14 +1930,13 @@ async def test_delete_transfer_config_flattened_async():
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
-
assert args[0].name == "name_value"
@pytest.mark.asyncio
async def test_delete_transfer_config_flattened_error_async():
client = DataTransferServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
@@ -2014,7 +1951,7 @@ def test_get_transfer_config(
transport: str = "grpc", request_type=datatransfer.GetTransferConfigRequest
):
client = DataTransferServiceClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -2039,37 +1976,24 @@ def test_get_transfer_config(
notification_pubsub_topic="notification_pubsub_topic_value",
destination_dataset_id="destination_dataset_id_value",
)
-
response = client.get_transfer_config(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
-
assert args[0] == datatransfer.GetTransferConfigRequest()
# Establish that the response is the type that we expect.
-
assert isinstance(response, transfer.TransferConfig)
-
assert response.name == "name_value"
-
assert response.display_name == "display_name_value"
-
assert response.data_source_id == "data_source_id_value"
-
assert response.schedule == "schedule_value"
-
assert response.data_refresh_window_days == 2543
-
assert response.disabled is True
-
assert response.state == transfer.TransferState.PENDING
-
assert response.user_id == 747
-
assert response.dataset_region == "dataset_region_value"
-
assert response.notification_pubsub_topic == "notification_pubsub_topic_value"
@@ -2081,7 +2005,7 @@ def test_get_transfer_config_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = DataTransferServiceClient(
- credentials=credentials.AnonymousCredentials(), transport="grpc",
+ credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -2091,7 +2015,6 @@ def test_get_transfer_config_empty_call():
client.get_transfer_config()
call.assert_called()
_, args, _ = call.mock_calls[0]
-
assert args[0] == datatransfer.GetTransferConfigRequest()
@@ -2100,7 +2023,7 @@ async def test_get_transfer_config_async(
transport: str = "grpc_asyncio", request_type=datatransfer.GetTransferConfigRequest
):
client = DataTransferServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -2126,36 +2049,24 @@ async def test_get_transfer_config_async(
notification_pubsub_topic="notification_pubsub_topic_value",
)
)
-
response = await client.get_transfer_config(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
-
assert args[0] == datatransfer.GetTransferConfigRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, transfer.TransferConfig)
-
assert response.name == "name_value"
-
assert response.display_name == "display_name_value"
-
assert response.data_source_id == "data_source_id_value"
-
assert response.schedule == "schedule_value"
-
assert response.data_refresh_window_days == 2543
-
assert response.disabled is True
-
assert response.state == transfer.TransferState.PENDING
-
assert response.user_id == 747
-
assert response.dataset_region == "dataset_region_value"
-
assert response.notification_pubsub_topic == "notification_pubsub_topic_value"
@@ -2165,11 +2076,14 @@ async def test_get_transfer_config_async_from_dict():
def test_get_transfer_config_field_headers():
- client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = DataTransferServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = datatransfer.GetTransferConfigRequest()
+
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
@@ -2177,7 +2091,6 @@ def test_get_transfer_config_field_headers():
type(client.transport.get_transfer_config), "__call__"
) as call:
call.return_value = transfer.TransferConfig()
-
client.get_transfer_config(request)
# Establish that the underlying gRPC stub method was called.
@@ -2193,12 +2106,13 @@ def test_get_transfer_config_field_headers():
@pytest.mark.asyncio
async def test_get_transfer_config_field_headers_async():
client = DataTransferServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = datatransfer.GetTransferConfigRequest()
+
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
@@ -2208,7 +2122,6 @@ async def test_get_transfer_config_field_headers_async():
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
transfer.TransferConfig()
)
-
await client.get_transfer_config(request)
# Establish that the underlying gRPC stub method was called.
@@ -2222,7 +2135,9 @@ async def test_get_transfer_config_field_headers_async():
def test_get_transfer_config_flattened():
- client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = DataTransferServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
@@ -2230,7 +2145,6 @@ def test_get_transfer_config_flattened():
) as call:
# Designate an appropriate return value for the call.
call.return_value = transfer.TransferConfig()
-
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.get_transfer_config(name="name_value",)
@@ -2239,12 +2153,13 @@ def test_get_transfer_config_flattened():
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
-
assert args[0].name == "name_value"
def test_get_transfer_config_flattened_error():
- client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = DataTransferServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
# Attempting to call a method with both a request object and flattened
# fields is an error.
@@ -2257,7 +2172,7 @@ def test_get_transfer_config_flattened_error():
@pytest.mark.asyncio
async def test_get_transfer_config_flattened_async():
client = DataTransferServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -2278,14 +2193,13 @@ async def test_get_transfer_config_flattened_async():
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
-
assert args[0].name == "name_value"
@pytest.mark.asyncio
async def test_get_transfer_config_flattened_error_async():
client = DataTransferServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
@@ -2300,7 +2214,7 @@ def test_list_transfer_configs(
transport: str = "grpc", request_type=datatransfer.ListTransferConfigsRequest
):
client = DataTransferServiceClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -2315,19 +2229,15 @@ def test_list_transfer_configs(
call.return_value = datatransfer.ListTransferConfigsResponse(
next_page_token="next_page_token_value",
)
-
response = client.list_transfer_configs(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
-
assert args[0] == datatransfer.ListTransferConfigsRequest()
# Establish that the response is the type that we expect.
-
assert isinstance(response, pagers.ListTransferConfigsPager)
-
assert response.next_page_token == "next_page_token_value"
@@ -2339,7 +2249,7 @@ def test_list_transfer_configs_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = DataTransferServiceClient(
- credentials=credentials.AnonymousCredentials(), transport="grpc",
+ credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -2349,7 +2259,6 @@ def test_list_transfer_configs_empty_call():
client.list_transfer_configs()
call.assert_called()
_, args, _ = call.mock_calls[0]
-
assert args[0] == datatransfer.ListTransferConfigsRequest()
@@ -2359,7 +2268,7 @@ async def test_list_transfer_configs_async(
request_type=datatransfer.ListTransferConfigsRequest,
):
client = DataTransferServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -2376,18 +2285,15 @@ async def test_list_transfer_configs_async(
next_page_token="next_page_token_value",
)
)
-
response = await client.list_transfer_configs(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
-
assert args[0] == datatransfer.ListTransferConfigsRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListTransferConfigsAsyncPager)
-
assert response.next_page_token == "next_page_token_value"
@@ -2397,11 +2303,14 @@ async def test_list_transfer_configs_async_from_dict():
def test_list_transfer_configs_field_headers():
- client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = DataTransferServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = datatransfer.ListTransferConfigsRequest()
+
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
@@ -2409,7 +2318,6 @@ def test_list_transfer_configs_field_headers():
type(client.transport.list_transfer_configs), "__call__"
) as call:
call.return_value = datatransfer.ListTransferConfigsResponse()
-
client.list_transfer_configs(request)
# Establish that the underlying gRPC stub method was called.
@@ -2425,12 +2333,13 @@ def test_list_transfer_configs_field_headers():
@pytest.mark.asyncio
async def test_list_transfer_configs_field_headers_async():
client = DataTransferServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = datatransfer.ListTransferConfigsRequest()
+
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
@@ -2440,7 +2349,6 @@ async def test_list_transfer_configs_field_headers_async():
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
datatransfer.ListTransferConfigsResponse()
)
-
await client.list_transfer_configs(request)
# Establish that the underlying gRPC stub method was called.
@@ -2454,7 +2362,9 @@ async def test_list_transfer_configs_field_headers_async():
def test_list_transfer_configs_flattened():
- client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = DataTransferServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
@@ -2462,7 +2372,6 @@ def test_list_transfer_configs_flattened():
) as call:
# Designate an appropriate return value for the call.
call.return_value = datatransfer.ListTransferConfigsResponse()
-
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.list_transfer_configs(parent="parent_value",)
@@ -2471,12 +2380,13 @@ def test_list_transfer_configs_flattened():
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
-
assert args[0].parent == "parent_value"
def test_list_transfer_configs_flattened_error():
- client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = DataTransferServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
# Attempting to call a method with both a request object and flattened
# fields is an error.
@@ -2489,7 +2399,7 @@ def test_list_transfer_configs_flattened_error():
@pytest.mark.asyncio
async def test_list_transfer_configs_flattened_async():
client = DataTransferServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -2510,14 +2420,13 @@ async def test_list_transfer_configs_flattened_async():
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
-
assert args[0].parent == "parent_value"
@pytest.mark.asyncio
async def test_list_transfer_configs_flattened_error_async():
client = DataTransferServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
@@ -2529,7 +2438,7 @@ async def test_list_transfer_configs_flattened_error_async():
def test_list_transfer_configs_pager():
- client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials,)
+ client = DataTransferServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
@@ -2574,7 +2483,7 @@ def test_list_transfer_configs_pager():
def test_list_transfer_configs_pages():
- client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials,)
+ client = DataTransferServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
@@ -2612,7 +2521,7 @@ def test_list_transfer_configs_pages():
@pytest.mark.asyncio
async def test_list_transfer_configs_async_pager():
client = DataTransferServiceAsyncClient(
- credentials=credentials.AnonymousCredentials,
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -2658,7 +2567,7 @@ async def test_list_transfer_configs_async_pager():
@pytest.mark.asyncio
async def test_list_transfer_configs_async_pages():
client = DataTransferServiceAsyncClient(
- credentials=credentials.AnonymousCredentials,
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -2702,7 +2611,7 @@ def test_schedule_transfer_runs(
transport: str = "grpc", request_type=datatransfer.ScheduleTransferRunsRequest
):
client = DataTransferServiceClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -2715,17 +2624,14 @@ def test_schedule_transfer_runs(
) as call:
# Designate an appropriate return value for the call.
call.return_value = datatransfer.ScheduleTransferRunsResponse()
-
response = client.schedule_transfer_runs(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
-
assert args[0] == datatransfer.ScheduleTransferRunsRequest()
# Establish that the response is the type that we expect.
-
assert isinstance(response, datatransfer.ScheduleTransferRunsResponse)
@@ -2737,7 +2643,7 @@ def test_schedule_transfer_runs_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = DataTransferServiceClient(
- credentials=credentials.AnonymousCredentials(), transport="grpc",
+ credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -2747,7 +2653,6 @@ def test_schedule_transfer_runs_empty_call():
client.schedule_transfer_runs()
call.assert_called()
_, args, _ = call.mock_calls[0]
-
assert args[0] == datatransfer.ScheduleTransferRunsRequest()
@@ -2757,7 +2662,7 @@ async def test_schedule_transfer_runs_async(
request_type=datatransfer.ScheduleTransferRunsRequest,
):
client = DataTransferServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -2772,13 +2677,11 @@ async def test_schedule_transfer_runs_async(
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
datatransfer.ScheduleTransferRunsResponse()
)
-
response = await client.schedule_transfer_runs(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
-
assert args[0] == datatransfer.ScheduleTransferRunsRequest()
# Establish that the response is the type that we expect.
@@ -2791,11 +2694,14 @@ async def test_schedule_transfer_runs_async_from_dict():
def test_schedule_transfer_runs_field_headers():
- client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = DataTransferServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = datatransfer.ScheduleTransferRunsRequest()
+
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
@@ -2803,7 +2709,6 @@ def test_schedule_transfer_runs_field_headers():
type(client.transport.schedule_transfer_runs), "__call__"
) as call:
call.return_value = datatransfer.ScheduleTransferRunsResponse()
-
client.schedule_transfer_runs(request)
# Establish that the underlying gRPC stub method was called.
@@ -2819,12 +2724,13 @@ def test_schedule_transfer_runs_field_headers():
@pytest.mark.asyncio
async def test_schedule_transfer_runs_field_headers_async():
client = DataTransferServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = datatransfer.ScheduleTransferRunsRequest()
+
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
@@ -2834,7 +2740,6 @@ async def test_schedule_transfer_runs_field_headers_async():
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
datatransfer.ScheduleTransferRunsResponse()
)
-
await client.schedule_transfer_runs(request)
# Establish that the underlying gRPC stub method was called.
@@ -2848,7 +2753,9 @@ async def test_schedule_transfer_runs_field_headers_async():
def test_schedule_transfer_runs_flattened():
- client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = DataTransferServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
@@ -2856,33 +2763,27 @@ def test_schedule_transfer_runs_flattened():
) as call:
# Designate an appropriate return value for the call.
call.return_value = datatransfer.ScheduleTransferRunsResponse()
-
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.schedule_transfer_runs(
parent="parent_value",
- start_time=timestamp.Timestamp(seconds=751),
- end_time=timestamp.Timestamp(seconds=751),
+ start_time=timestamp_pb2.Timestamp(seconds=751),
+ end_time=timestamp_pb2.Timestamp(seconds=751),
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
-
assert args[0].parent == "parent_value"
-
- assert TimestampRule().to_proto(args[0].start_time) == timestamp.Timestamp(
- seconds=751
- )
-
- assert TimestampRule().to_proto(args[0].end_time) == timestamp.Timestamp(
- seconds=751
- )
+ # assert args[0].start_time == timestamp_pb2.Timestamp(seconds=751)
+ # assert args[0].end_time == timestamp_pb2.Timestamp(seconds=751)
def test_schedule_transfer_runs_flattened_error():
- client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = DataTransferServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
# Attempting to call a method with both a request object and flattened
# fields is an error.
@@ -2890,15 +2791,15 @@ def test_schedule_transfer_runs_flattened_error():
client.schedule_transfer_runs(
datatransfer.ScheduleTransferRunsRequest(),
parent="parent_value",
- start_time=timestamp.Timestamp(seconds=751),
- end_time=timestamp.Timestamp(seconds=751),
+ start_time=timestamp_pb2.Timestamp(seconds=751),
+ end_time=timestamp_pb2.Timestamp(seconds=751),
)
@pytest.mark.asyncio
async def test_schedule_transfer_runs_flattened_async():
client = DataTransferServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -2915,30 +2816,23 @@ async def test_schedule_transfer_runs_flattened_async():
# using the keyword arguments to the method.
response = await client.schedule_transfer_runs(
parent="parent_value",
- start_time=timestamp.Timestamp(seconds=751),
- end_time=timestamp.Timestamp(seconds=751),
+ start_time=timestamp_pb2.Timestamp(seconds=751),
+ end_time=timestamp_pb2.Timestamp(seconds=751),
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
-
assert args[0].parent == "parent_value"
-
- assert TimestampRule().to_proto(args[0].start_time) == timestamp.Timestamp(
- seconds=751
- )
-
- assert TimestampRule().to_proto(args[0].end_time) == timestamp.Timestamp(
- seconds=751
- )
+ # assert args[0].start_time == timestamp_pb2.Timestamp(seconds=751)
+ # assert args[0].end_time == timestamp_pb2.Timestamp(seconds=751)
@pytest.mark.asyncio
async def test_schedule_transfer_runs_flattened_error_async():
client = DataTransferServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
@@ -2947,8 +2841,8 @@ async def test_schedule_transfer_runs_flattened_error_async():
await client.schedule_transfer_runs(
datatransfer.ScheduleTransferRunsRequest(),
parent="parent_value",
- start_time=timestamp.Timestamp(seconds=751),
- end_time=timestamp.Timestamp(seconds=751),
+ start_time=timestamp_pb2.Timestamp(seconds=751),
+ end_time=timestamp_pb2.Timestamp(seconds=751),
)
@@ -2956,7 +2850,7 @@ def test_start_manual_transfer_runs(
transport: str = "grpc", request_type=datatransfer.StartManualTransferRunsRequest
):
client = DataTransferServiceClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -2969,17 +2863,14 @@ def test_start_manual_transfer_runs(
) as call:
# Designate an appropriate return value for the call.
call.return_value = datatransfer.StartManualTransferRunsResponse()
-
response = client.start_manual_transfer_runs(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
-
assert args[0] == datatransfer.StartManualTransferRunsRequest()
# Establish that the response is the type that we expect.
-
assert isinstance(response, datatransfer.StartManualTransferRunsResponse)
@@ -2991,7 +2882,7 @@ def test_start_manual_transfer_runs_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = DataTransferServiceClient(
- credentials=credentials.AnonymousCredentials(), transport="grpc",
+ credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -3001,7 +2892,6 @@ def test_start_manual_transfer_runs_empty_call():
client.start_manual_transfer_runs()
call.assert_called()
_, args, _ = call.mock_calls[0]
-
assert args[0] == datatransfer.StartManualTransferRunsRequest()
@@ -3011,7 +2901,7 @@ async def test_start_manual_transfer_runs_async(
request_type=datatransfer.StartManualTransferRunsRequest,
):
client = DataTransferServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -3026,13 +2916,11 @@ async def test_start_manual_transfer_runs_async(
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
datatransfer.StartManualTransferRunsResponse()
)
-
response = await client.start_manual_transfer_runs(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
-
assert args[0] == datatransfer.StartManualTransferRunsRequest()
# Establish that the response is the type that we expect.
@@ -3045,11 +2933,14 @@ async def test_start_manual_transfer_runs_async_from_dict():
def test_start_manual_transfer_runs_field_headers():
- client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = DataTransferServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = datatransfer.StartManualTransferRunsRequest()
+
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
@@ -3057,7 +2948,6 @@ def test_start_manual_transfer_runs_field_headers():
type(client.transport.start_manual_transfer_runs), "__call__"
) as call:
call.return_value = datatransfer.StartManualTransferRunsResponse()
-
client.start_manual_transfer_runs(request)
# Establish that the underlying gRPC stub method was called.
@@ -3073,12 +2963,13 @@ def test_start_manual_transfer_runs_field_headers():
@pytest.mark.asyncio
async def test_start_manual_transfer_runs_field_headers_async():
client = DataTransferServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = datatransfer.StartManualTransferRunsRequest()
+
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
@@ -3088,7 +2979,6 @@ async def test_start_manual_transfer_runs_field_headers_async():
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
datatransfer.StartManualTransferRunsResponse()
)
-
await client.start_manual_transfer_runs(request)
# Establish that the underlying gRPC stub method was called.
@@ -3105,7 +2995,7 @@ def test_get_transfer_run(
transport: str = "grpc", request_type=datatransfer.GetTransferRunRequest
):
client = DataTransferServiceClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -3124,29 +3014,20 @@ def test_get_transfer_run(
notification_pubsub_topic="notification_pubsub_topic_value",
destination_dataset_id="destination_dataset_id_value",
)
-
response = client.get_transfer_run(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
-
assert args[0] == datatransfer.GetTransferRunRequest()
# Establish that the response is the type that we expect.
-
assert isinstance(response, transfer.TransferRun)
-
assert response.name == "name_value"
-
assert response.data_source_id == "data_source_id_value"
-
assert response.state == transfer.TransferState.PENDING
-
assert response.user_id == 747
-
assert response.schedule == "schedule_value"
-
assert response.notification_pubsub_topic == "notification_pubsub_topic_value"
@@ -3158,7 +3039,7 @@ def test_get_transfer_run_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = DataTransferServiceClient(
- credentials=credentials.AnonymousCredentials(), transport="grpc",
+ credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -3166,7 +3047,6 @@ def test_get_transfer_run_empty_call():
client.get_transfer_run()
call.assert_called()
_, args, _ = call.mock_calls[0]
-
assert args[0] == datatransfer.GetTransferRunRequest()
@@ -3175,7 +3055,7 @@ async def test_get_transfer_run_async(
transport: str = "grpc_asyncio", request_type=datatransfer.GetTransferRunRequest
):
client = DataTransferServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -3195,28 +3075,20 @@ async def test_get_transfer_run_async(
notification_pubsub_topic="notification_pubsub_topic_value",
)
)
-
response = await client.get_transfer_run(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
-
assert args[0] == datatransfer.GetTransferRunRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, transfer.TransferRun)
-
assert response.name == "name_value"
-
assert response.data_source_id == "data_source_id_value"
-
assert response.state == transfer.TransferState.PENDING
-
assert response.user_id == 747
-
assert response.schedule == "schedule_value"
-
assert response.notification_pubsub_topic == "notification_pubsub_topic_value"
@@ -3226,17 +3098,19 @@ async def test_get_transfer_run_async_from_dict():
def test_get_transfer_run_field_headers():
- client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = DataTransferServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = datatransfer.GetTransferRunRequest()
+
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_transfer_run), "__call__") as call:
call.return_value = transfer.TransferRun()
-
client.get_transfer_run(request)
# Establish that the underlying gRPC stub method was called.
@@ -3252,12 +3126,13 @@ def test_get_transfer_run_field_headers():
@pytest.mark.asyncio
async def test_get_transfer_run_field_headers_async():
client = DataTransferServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = datatransfer.GetTransferRunRequest()
+
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
@@ -3265,7 +3140,6 @@ async def test_get_transfer_run_field_headers_async():
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
transfer.TransferRun()
)
-
await client.get_transfer_run(request)
# Establish that the underlying gRPC stub method was called.
@@ -3279,13 +3153,14 @@ async def test_get_transfer_run_field_headers_async():
def test_get_transfer_run_flattened():
- client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = DataTransferServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_transfer_run), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = transfer.TransferRun()
-
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.get_transfer_run(name="name_value",)
@@ -3294,12 +3169,13 @@ def test_get_transfer_run_flattened():
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
-
assert args[0].name == "name_value"
def test_get_transfer_run_flattened_error():
- client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = DataTransferServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
# Attempting to call a method with both a request object and flattened
# fields is an error.
@@ -3312,7 +3188,7 @@ def test_get_transfer_run_flattened_error():
@pytest.mark.asyncio
async def test_get_transfer_run_flattened_async():
client = DataTransferServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -3331,14 +3207,13 @@ async def test_get_transfer_run_flattened_async():
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
-
assert args[0].name == "name_value"
@pytest.mark.asyncio
async def test_get_transfer_run_flattened_error_async():
client = DataTransferServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
@@ -3353,7 +3228,7 @@ def test_delete_transfer_run(
transport: str = "grpc", request_type=datatransfer.DeleteTransferRunRequest
):
client = DataTransferServiceClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -3366,13 +3241,11 @@ def test_delete_transfer_run(
) as call:
# Designate an appropriate return value for the call.
call.return_value = None
-
response = client.delete_transfer_run(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
-
assert args[0] == datatransfer.DeleteTransferRunRequest()
# Establish that the response is the type that we expect.
@@ -3387,7 +3260,7 @@ def test_delete_transfer_run_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = DataTransferServiceClient(
- credentials=credentials.AnonymousCredentials(), transport="grpc",
+ credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -3397,7 +3270,6 @@ def test_delete_transfer_run_empty_call():
client.delete_transfer_run()
call.assert_called()
_, args, _ = call.mock_calls[0]
-
assert args[0] == datatransfer.DeleteTransferRunRequest()
@@ -3406,7 +3278,7 @@ async def test_delete_transfer_run_async(
transport: str = "grpc_asyncio", request_type=datatransfer.DeleteTransferRunRequest
):
client = DataTransferServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -3419,13 +3291,11 @@ async def test_delete_transfer_run_async(
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-
response = await client.delete_transfer_run(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
-
assert args[0] == datatransfer.DeleteTransferRunRequest()
# Establish that the response is the type that we expect.
@@ -3438,11 +3308,14 @@ async def test_delete_transfer_run_async_from_dict():
def test_delete_transfer_run_field_headers():
- client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = DataTransferServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = datatransfer.DeleteTransferRunRequest()
+
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
@@ -3450,7 +3323,6 @@ def test_delete_transfer_run_field_headers():
type(client.transport.delete_transfer_run), "__call__"
) as call:
call.return_value = None
-
client.delete_transfer_run(request)
# Establish that the underlying gRPC stub method was called.
@@ -3466,12 +3338,13 @@ def test_delete_transfer_run_field_headers():
@pytest.mark.asyncio
async def test_delete_transfer_run_field_headers_async():
client = DataTransferServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = datatransfer.DeleteTransferRunRequest()
+
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
@@ -3479,7 +3352,6 @@ async def test_delete_transfer_run_field_headers_async():
type(client.transport.delete_transfer_run), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-
await client.delete_transfer_run(request)
# Establish that the underlying gRPC stub method was called.
@@ -3493,7 +3365,9 @@ async def test_delete_transfer_run_field_headers_async():
def test_delete_transfer_run_flattened():
- client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = DataTransferServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
@@ -3501,7 +3375,6 @@ def test_delete_transfer_run_flattened():
) as call:
# Designate an appropriate return value for the call.
call.return_value = None
-
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.delete_transfer_run(name="name_value",)
@@ -3510,12 +3383,13 @@ def test_delete_transfer_run_flattened():
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
-
assert args[0].name == "name_value"
def test_delete_transfer_run_flattened_error():
- client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = DataTransferServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
# Attempting to call a method with both a request object and flattened
# fields is an error.
@@ -3528,7 +3402,7 @@ def test_delete_transfer_run_flattened_error():
@pytest.mark.asyncio
async def test_delete_transfer_run_flattened_async():
client = DataTransferServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -3547,14 +3421,13 @@ async def test_delete_transfer_run_flattened_async():
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
-
assert args[0].name == "name_value"
@pytest.mark.asyncio
async def test_delete_transfer_run_flattened_error_async():
client = DataTransferServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
@@ -3569,7 +3442,7 @@ def test_list_transfer_runs(
transport: str = "grpc", request_type=datatransfer.ListTransferRunsRequest
):
client = DataTransferServiceClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -3584,19 +3457,15 @@ def test_list_transfer_runs(
call.return_value = datatransfer.ListTransferRunsResponse(
next_page_token="next_page_token_value",
)
-
response = client.list_transfer_runs(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
-
assert args[0] == datatransfer.ListTransferRunsRequest()
# Establish that the response is the type that we expect.
-
assert isinstance(response, pagers.ListTransferRunsPager)
-
assert response.next_page_token == "next_page_token_value"
@@ -3608,7 +3477,7 @@ def test_list_transfer_runs_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = DataTransferServiceClient(
- credentials=credentials.AnonymousCredentials(), transport="grpc",
+ credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -3618,7 +3487,6 @@ def test_list_transfer_runs_empty_call():
client.list_transfer_runs()
call.assert_called()
_, args, _ = call.mock_calls[0]
-
assert args[0] == datatransfer.ListTransferRunsRequest()
@@ -3627,7 +3495,7 @@ async def test_list_transfer_runs_async(
transport: str = "grpc_asyncio", request_type=datatransfer.ListTransferRunsRequest
):
client = DataTransferServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -3644,18 +3512,15 @@ async def test_list_transfer_runs_async(
next_page_token="next_page_token_value",
)
)
-
response = await client.list_transfer_runs(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
-
assert args[0] == datatransfer.ListTransferRunsRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListTransferRunsAsyncPager)
-
assert response.next_page_token == "next_page_token_value"
@@ -3665,11 +3530,14 @@ async def test_list_transfer_runs_async_from_dict():
def test_list_transfer_runs_field_headers():
- client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = DataTransferServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = datatransfer.ListTransferRunsRequest()
+
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
@@ -3677,7 +3545,6 @@ def test_list_transfer_runs_field_headers():
type(client.transport.list_transfer_runs), "__call__"
) as call:
call.return_value = datatransfer.ListTransferRunsResponse()
-
client.list_transfer_runs(request)
# Establish that the underlying gRPC stub method was called.
@@ -3693,12 +3560,13 @@ def test_list_transfer_runs_field_headers():
@pytest.mark.asyncio
async def test_list_transfer_runs_field_headers_async():
client = DataTransferServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = datatransfer.ListTransferRunsRequest()
+
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
@@ -3708,7 +3576,6 @@ async def test_list_transfer_runs_field_headers_async():
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
datatransfer.ListTransferRunsResponse()
)
-
await client.list_transfer_runs(request)
# Establish that the underlying gRPC stub method was called.
@@ -3722,7 +3589,9 @@ async def test_list_transfer_runs_field_headers_async():
def test_list_transfer_runs_flattened():
- client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = DataTransferServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
@@ -3730,7 +3599,6 @@ def test_list_transfer_runs_flattened():
) as call:
# Designate an appropriate return value for the call.
call.return_value = datatransfer.ListTransferRunsResponse()
-
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.list_transfer_runs(parent="parent_value",)
@@ -3739,12 +3607,13 @@ def test_list_transfer_runs_flattened():
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
-
assert args[0].parent == "parent_value"
def test_list_transfer_runs_flattened_error():
- client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = DataTransferServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
# Attempting to call a method with both a request object and flattened
# fields is an error.
@@ -3757,7 +3626,7 @@ def test_list_transfer_runs_flattened_error():
@pytest.mark.asyncio
async def test_list_transfer_runs_flattened_async():
client = DataTransferServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -3778,14 +3647,13 @@ async def test_list_transfer_runs_flattened_async():
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
-
assert args[0].parent == "parent_value"
@pytest.mark.asyncio
async def test_list_transfer_runs_flattened_error_async():
client = DataTransferServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
@@ -3797,7 +3665,7 @@ async def test_list_transfer_runs_flattened_error_async():
def test_list_transfer_runs_pager():
- client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials,)
+ client = DataTransferServiceClient(credentials=ga_credentials.AnonymousCredentials,)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
@@ -3839,7 +3707,7 @@ def test_list_transfer_runs_pager():
def test_list_transfer_runs_pages():
- client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials,)
+ client = DataTransferServiceClient(credentials=ga_credentials.AnonymousCredentials,)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
@@ -3874,7 +3742,7 @@ def test_list_transfer_runs_pages():
@pytest.mark.asyncio
async def test_list_transfer_runs_async_pager():
client = DataTransferServiceAsyncClient(
- credentials=credentials.AnonymousCredentials,
+ credentials=ga_credentials.AnonymousCredentials,
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -3917,7 +3785,7 @@ async def test_list_transfer_runs_async_pager():
@pytest.mark.asyncio
async def test_list_transfer_runs_async_pages():
client = DataTransferServiceAsyncClient(
- credentials=credentials.AnonymousCredentials,
+ credentials=ga_credentials.AnonymousCredentials,
)
# Mock the actual call within the gRPC stub, and fake the request.
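# Annotation (not part of the patch): the list_transfer_runs pager tests
# in this region all exercise one contract: the pager re-invokes the RPC
# with each response's next_page_token and yields individual items across
# page boundaries. A hedged usage sketch (names from the tests above):
#
#     pager = client.list_transfer_runs(parent="parent_value")
#     runs = list(pager)  # sync pager: plain iteration fetches all pages
#
#     async_pager = await client.list_transfer_runs(parent="parent_value")
#     runs = [run async for run in async_pager]  # async variant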
@@ -3958,7 +3826,7 @@ def test_list_transfer_logs(
transport: str = "grpc", request_type=datatransfer.ListTransferLogsRequest
):
client = DataTransferServiceClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -3973,19 +3841,15 @@ def test_list_transfer_logs(
call.return_value = datatransfer.ListTransferLogsResponse(
next_page_token="next_page_token_value",
)
-
response = client.list_transfer_logs(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
-
assert args[0] == datatransfer.ListTransferLogsRequest()
# Establish that the response is the type that we expect.
-
assert isinstance(response, pagers.ListTransferLogsPager)
-
assert response.next_page_token == "next_page_token_value"
@@ -3997,7 +3861,7 @@ def test_list_transfer_logs_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = DataTransferServiceClient(
- credentials=credentials.AnonymousCredentials(), transport="grpc",
+ credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -4007,7 +3871,6 @@ def test_list_transfer_logs_empty_call():
client.list_transfer_logs()
call.assert_called()
_, args, _ = call.mock_calls[0]
-
assert args[0] == datatransfer.ListTransferLogsRequest()
@@ -4016,7 +3879,7 @@ async def test_list_transfer_logs_async(
transport: str = "grpc_asyncio", request_type=datatransfer.ListTransferLogsRequest
):
client = DataTransferServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -4033,18 +3896,15 @@ async def test_list_transfer_logs_async(
next_page_token="next_page_token_value",
)
)
-
response = await client.list_transfer_logs(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
-
assert args[0] == datatransfer.ListTransferLogsRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListTransferLogsAsyncPager)
-
assert response.next_page_token == "next_page_token_value"
@@ -4054,11 +3914,14 @@ async def test_list_transfer_logs_async_from_dict():
def test_list_transfer_logs_field_headers():
- client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = DataTransferServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = datatransfer.ListTransferLogsRequest()
+
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
@@ -4066,7 +3929,6 @@ def test_list_transfer_logs_field_headers():
type(client.transport.list_transfer_logs), "__call__"
) as call:
call.return_value = datatransfer.ListTransferLogsResponse()
-
client.list_transfer_logs(request)
# Establish that the underlying gRPC stub method was called.
@@ -4082,12 +3944,13 @@ def test_list_transfer_logs_field_headers():
@pytest.mark.asyncio
async def test_list_transfer_logs_field_headers_async():
client = DataTransferServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = datatransfer.ListTransferLogsRequest()
+
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
@@ -4097,7 +3960,6 @@ async def test_list_transfer_logs_field_headers_async():
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
datatransfer.ListTransferLogsResponse()
)
-
await client.list_transfer_logs(request)
# Establish that the underlying gRPC stub method was called.
@@ -4111,7 +3973,9 @@ async def test_list_transfer_logs_field_headers_async():
def test_list_transfer_logs_flattened():
- client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = DataTransferServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
@@ -4119,7 +3983,6 @@ def test_list_transfer_logs_flattened():
) as call:
# Designate an appropriate return value for the call.
call.return_value = datatransfer.ListTransferLogsResponse()
-
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.list_transfer_logs(parent="parent_value",)
@@ -4128,12 +3991,13 @@ def test_list_transfer_logs_flattened():
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
-
assert args[0].parent == "parent_value"
def test_list_transfer_logs_flattened_error():
- client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = DataTransferServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
# Attempting to call a method with both a request object and flattened
# fields is an error.
@@ -4146,7 +4010,7 @@ def test_list_transfer_logs_flattened_error():
@pytest.mark.asyncio
async def test_list_transfer_logs_flattened_async():
client = DataTransferServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -4167,14 +4031,13 @@ async def test_list_transfer_logs_flattened_async():
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
-
assert args[0].parent == "parent_value"
@pytest.mark.asyncio
async def test_list_transfer_logs_flattened_error_async():
client = DataTransferServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
@@ -4186,7 +4049,7 @@ async def test_list_transfer_logs_flattened_error_async():
def test_list_transfer_logs_pager():
- client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials,)
+ client = DataTransferServiceClient(credentials=ga_credentials.AnonymousCredentials,)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
@@ -4231,7 +4094,7 @@ def test_list_transfer_logs_pager():
def test_list_transfer_logs_pages():
- client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials,)
+ client = DataTransferServiceClient(credentials=ga_credentials.AnonymousCredentials,)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
@@ -4269,7 +4132,7 @@ def test_list_transfer_logs_pages():
@pytest.mark.asyncio
async def test_list_transfer_logs_async_pager():
client = DataTransferServiceAsyncClient(
- credentials=credentials.AnonymousCredentials,
+ credentials=ga_credentials.AnonymousCredentials,
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -4315,7 +4178,7 @@ async def test_list_transfer_logs_async_pager():
@pytest.mark.asyncio
async def test_list_transfer_logs_async_pages():
client = DataTransferServiceAsyncClient(
- credentials=credentials.AnonymousCredentials,
+ credentials=ga_credentials.AnonymousCredentials,
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -4359,7 +4222,7 @@ def test_check_valid_creds(
transport: str = "grpc", request_type=datatransfer.CheckValidCredsRequest
):
client = DataTransferServiceClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -4372,19 +4235,15 @@ def test_check_valid_creds(
) as call:
# Designate an appropriate return value for the call.
call.return_value = datatransfer.CheckValidCredsResponse(has_valid_creds=True,)
-
response = client.check_valid_creds(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
-
assert args[0] == datatransfer.CheckValidCredsRequest()
# Establish that the response is the type that we expect.
-
assert isinstance(response, datatransfer.CheckValidCredsResponse)
-
assert response.has_valid_creds is True
@@ -4396,7 +4255,7 @@ def test_check_valid_creds_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = DataTransferServiceClient(
- credentials=credentials.AnonymousCredentials(), transport="grpc",
+ credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -4406,7 +4265,6 @@ def test_check_valid_creds_empty_call():
client.check_valid_creds()
call.assert_called()
_, args, _ = call.mock_calls[0]
-
assert args[0] == datatransfer.CheckValidCredsRequest()
@@ -4415,7 +4273,7 @@ async def test_check_valid_creds_async(
transport: str = "grpc_asyncio", request_type=datatransfer.CheckValidCredsRequest
):
client = DataTransferServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -4430,18 +4288,15 @@ async def test_check_valid_creds_async(
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
datatransfer.CheckValidCredsResponse(has_valid_creds=True,)
)
-
response = await client.check_valid_creds(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
-
assert args[0] == datatransfer.CheckValidCredsRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, datatransfer.CheckValidCredsResponse)
-
assert response.has_valid_creds is True
@@ -4451,11 +4306,14 @@ async def test_check_valid_creds_async_from_dict():
def test_check_valid_creds_field_headers():
- client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = DataTransferServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = datatransfer.CheckValidCredsRequest()
+
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
@@ -4463,7 +4321,6 @@ def test_check_valid_creds_field_headers():
type(client.transport.check_valid_creds), "__call__"
) as call:
call.return_value = datatransfer.CheckValidCredsResponse()
-
client.check_valid_creds(request)
# Establish that the underlying gRPC stub method was called.
@@ -4479,12 +4336,13 @@ def test_check_valid_creds_field_headers():
@pytest.mark.asyncio
async def test_check_valid_creds_field_headers_async():
client = DataTransferServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = datatransfer.CheckValidCredsRequest()
+
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
@@ -4494,7 +4352,6 @@ async def test_check_valid_creds_field_headers_async():
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
datatransfer.CheckValidCredsResponse()
)
-
await client.check_valid_creds(request)
# Establish that the underlying gRPC stub method was called.
@@ -4508,7 +4365,9 @@ async def test_check_valid_creds_field_headers_async():
def test_check_valid_creds_flattened():
- client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = DataTransferServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
@@ -4516,7 +4375,6 @@ def test_check_valid_creds_flattened():
) as call:
# Designate an appropriate return value for the call.
call.return_value = datatransfer.CheckValidCredsResponse()
-
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.check_valid_creds(name="name_value",)
@@ -4525,12 +4383,13 @@ def test_check_valid_creds_flattened():
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
-
assert args[0].name == "name_value"
def test_check_valid_creds_flattened_error():
- client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = DataTransferServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
# Attempting to call a method with both a request object and flattened
# fields is an error.
@@ -4543,7 +4402,7 @@ def test_check_valid_creds_flattened_error():
@pytest.mark.asyncio
async def test_check_valid_creds_flattened_async():
client = DataTransferServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -4564,14 +4423,13 @@ async def test_check_valid_creds_flattened_async():
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
-
assert args[0].name == "name_value"
@pytest.mark.asyncio
async def test_check_valid_creds_flattened_error_async():
client = DataTransferServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
@@ -4585,16 +4443,16 @@ async def test_check_valid_creds_flattened_error_async():
def test_credentials_transport_error():
# It is an error to provide credentials and a transport instance.
transport = transports.DataTransferServiceGrpcTransport(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
with pytest.raises(ValueError):
client = DataTransferServiceClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# It is an error to provide a credentials file and a transport instance.
transport = transports.DataTransferServiceGrpcTransport(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
with pytest.raises(ValueError):
client = DataTransferServiceClient(
@@ -4604,7 +4462,7 @@ def test_credentials_transport_error():
# It is an error to provide scopes and a transport instance.
transport = transports.DataTransferServiceGrpcTransport(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
with pytest.raises(ValueError):
client = DataTransferServiceClient(
@@ -4615,7 +4473,7 @@ def test_credentials_transport_error():
def test_transport_instance():
# A client may be instantiated with a custom transport instance.
transport = transports.DataTransferServiceGrpcTransport(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
client = DataTransferServiceClient(transport=transport)
assert client.transport is transport
@@ -4624,13 +4482,13 @@ def test_transport_instance():
def test_transport_get_channel():
# A client may be instantiated with a custom transport instance.
transport = transports.DataTransferServiceGrpcTransport(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
channel = transport.grpc_channel
assert channel
transport = transports.DataTransferServiceGrpcAsyncIOTransport(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
channel = transport.grpc_channel
assert channel
@@ -4645,23 +4503,25 @@ def test_transport_get_channel():
)
def test_transport_adc(transport_class):
# Test default credentials are used if not provided.
- with mock.patch.object(auth, "default") as adc:
- adc.return_value = (credentials.AnonymousCredentials(), None)
+ with mock.patch.object(google.auth, "default") as adc:
+ adc.return_value = (ga_credentials.AnonymousCredentials(), None)
transport_class()
adc.assert_called_once()
def test_transport_grpc_default():
# A client should use the gRPC transport by default.
- client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = DataTransferServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
assert isinstance(client.transport, transports.DataTransferServiceGrpcTransport,)
def test_data_transfer_service_base_transport_error():
# Passing both a credentials object and credentials_file should raise an error
- with pytest.raises(exceptions.DuplicateCredentialArgs):
+ with pytest.raises(core_exceptions.DuplicateCredentialArgs):
transport = transports.DataTransferServiceTransport(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
credentials_file="credentials.json",
)
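# Annotation (not part of the patch): core_exceptions is the renamed
# import of google.api_core.exceptions. The contract under test, that a
# credentials object and a credentials_file are mutually exclusive, is
# enforced in the base transport, roughly as follows (a sketch, assuming
# the usual api-core wording):
#
#     if credentials and credentials_file:
#         raise core_exceptions.DuplicateCredentialArgs(
#             "'credentials_file' and 'credentials' are mutually exclusive"
#         )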
@@ -4673,7 +4533,7 @@ def test_data_transfer_service_base_transport():
) as Transport:
Transport.return_value = None
transport = transports.DataTransferServiceTransport(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Every method on the transport should just blindly
@@ -4699,15 +4559,37 @@ def test_data_transfer_service_base_transport():
getattr(transport, method)(request=object())
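# Annotation (not part of the patch): the requires_* markers applied from
# here on are defined near the top of this module. A minimal sketch of
# their usual shape (an assumption; the exact wording may differ):
#
#     import packaging.version
#
#     _GOOGLE_AUTH_VERSION = google.auth.__version__
#
#     requires_google_auth_gte_1_25_0 = pytest.mark.skipif(
#         packaging.version.parse(_GOOGLE_AUTH_VERSION)
#         < packaging.version.parse("1.25.0"),
#         reason="This test requires google-auth >= 1.25.0",
#     )
#     requires_google_auth_lt_1_25_0 = pytest.mark.skipif(
#         packaging.version.parse(_GOOGLE_AUTH_VERSION)
#         >= packaging.version.parse("1.25.0"),
#         reason="This test requires google-auth < 1.25.0",
#     )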
+@requires_google_auth_gte_1_25_0
def test_data_transfer_service_base_transport_with_credentials_file():
# Instantiate the base transport with a credentials file
with mock.patch.object(
- auth, "load_credentials_from_file"
+ google.auth, "load_credentials_from_file", autospec=True
) as load_creds, mock.patch(
"google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.transports.DataTransferServiceTransport._prep_wrapped_messages"
) as Transport:
Transport.return_value = None
- load_creds.return_value = (credentials.AnonymousCredentials(), None)
+ load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
+ transport = transports.DataTransferServiceTransport(
+ credentials_file="credentials.json", quota_project_id="octopus",
+ )
+ load_creds.assert_called_once_with(
+ "credentials.json",
+ scopes=None,
+ default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
+ quota_project_id="octopus",
+ )
+
+
+@requires_google_auth_lt_1_25_0
+def test_data_transfer_service_base_transport_with_credentials_file_old_google_auth():
+ # Instantiate the base transport with a credentials file
+ with mock.patch.object(
+ google.auth, "load_credentials_from_file", autospec=True
+ ) as load_creds, mock.patch(
+ "google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.transports.DataTransferServiceTransport._prep_wrapped_messages"
+ ) as Transport:
+ Transport.return_value = None
+ load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
transport = transports.DataTransferServiceTransport(
credentials_file="credentials.json", quota_project_id="octopus",
)
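# Annotation (not part of the patch): why the test above now exists twice.
# google-auth 1.25.0 split client-library default scopes out of the user
# scopes argument, so the same transport code calls
# load_credentials_from_file with version-dependent kwargs. A
# self-contained sketch of the switch (an assumption, for illustration):
#
#     import packaging.version
#
#     CLOUD_PLATFORM = "https://www.googleapis.com/auth/cloud-platform"
#
#     def expected_load_creds_kwargs(google_auth_version):
#         if packaging.version.parse(google_auth_version) >= packaging.version.parse("1.25.0"):
#             # New: user scopes stay None; library defaults travel separately.
#             return {"scopes": None, "default_scopes": (CLOUD_PLATFORM,)}
#         # Old: library defaults are passed as the only scopes argument.
#         return {"scopes": (CLOUD_PLATFORM,)}
#
#     assert expected_load_creds_kwargs("1.30.0")["default_scopes"] == (CLOUD_PLATFORM,)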
@@ -4720,19 +4602,33 @@ def test_data_transfer_service_base_transport_with_credentials_file():
def test_data_transfer_service_base_transport_with_adc():
# Test the default credentials are used if credentials and credentials_file are None.
- with mock.patch.object(auth, "default") as adc, mock.patch(
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch(
"google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.transports.DataTransferServiceTransport._prep_wrapped_messages"
) as Transport:
Transport.return_value = None
- adc.return_value = (credentials.AnonymousCredentials(), None)
+ adc.return_value = (ga_credentials.AnonymousCredentials(), None)
transport = transports.DataTransferServiceTransport()
adc.assert_called_once()
+@requires_google_auth_gte_1_25_0
def test_data_transfer_service_auth_adc():
# If no credentials are provided, we should use ADC credentials.
- with mock.patch.object(auth, "default") as adc:
- adc.return_value = (credentials.AnonymousCredentials(), None)
+ with mock.patch.object(google.auth, "default", autospec=True) as adc:
+ adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+ DataTransferServiceClient()
+ adc.assert_called_once_with(
+ scopes=None,
+ default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
+ quota_project_id=None,
+ )
+
+
+@requires_google_auth_lt_1_25_0
+def test_data_transfer_service_auth_adc_old_google_auth():
+ # If no credentials are provided, we should use ADC credentials.
+ with mock.patch.object(google.auth, "default", autospec=True) as adc:
+ adc.return_value = (ga_credentials.AnonymousCredentials(), None)
DataTransferServiceClient()
adc.assert_called_once_with(
scopes=("https://www.googleapis.com/auth/cloud-platform",),
@@ -4740,20 +4636,156 @@ def test_data_transfer_service_auth_adc():
)
-def test_data_transfer_service_transport_auth_adc():
+@pytest.mark.parametrize(
+ "transport_class",
+ [
+ transports.DataTransferServiceGrpcTransport,
+ transports.DataTransferServiceGrpcAsyncIOTransport,
+ ],
+)
+@requires_google_auth_gte_1_25_0
+def test_data_transfer_service_transport_auth_adc(transport_class):
# If credentials and host are not provided, the transport class should use
# ADC credentials.
- with mock.patch.object(auth, "default") as adc:
- adc.return_value = (credentials.AnonymousCredentials(), None)
- transports.DataTransferServiceGrpcTransport(
- host="squid.clam.whelk", quota_project_id="octopus"
+ with mock.patch.object(google.auth, "default", autospec=True) as adc:
+ adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+ transport_class(quota_project_id="octopus", scopes=["1", "2"])
+ adc.assert_called_once_with(
+ scopes=["1", "2"],
+ default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
+ quota_project_id="octopus",
)
+
+
+@pytest.mark.parametrize(
+ "transport_class",
+ [
+ transports.DataTransferServiceGrpcTransport,
+ transports.DataTransferServiceGrpcAsyncIOTransport,
+ ],
+)
+@requires_google_auth_lt_1_25_0
+def test_data_transfer_service_transport_auth_adc_old_google_auth(transport_class):
+ # If credentials and host are not provided, the transport class should use
+ # ADC credentials.
+ with mock.patch.object(google.auth, "default", autospec=True) as adc:
+ adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+ transport_class(quota_project_id="octopus")
adc.assert_called_once_with(
scopes=("https://www.googleapis.com/auth/cloud-platform",),
quota_project_id="octopus",
)
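# Annotation (not part of the patch): the create_channel tests below are
# gated on api-core the same way, because grpc_helpers.create_channel
# only accepts default_scopes/default_host from google.api_core 1.26.0
# onward. A sketch of those markers (an assumption; they are defined near
# the top of the module):
#
#     _API_CORE_VERSION = google.api_core.__version__
#
#     requires_api_core_gte_1_26_0 = pytest.mark.skipif(
#         packaging.version.parse(_API_CORE_VERSION)
#         < packaging.version.parse("1.26.0"),
#         reason="This test requires google-api-core >= 1.26.0",
#     )
#     requires_api_core_lt_1_26_0 = pytest.mark.skipif(
#         packaging.version.parse(_API_CORE_VERSION)
#         >= packaging.version.parse("1.26.0"),
#         reason="This test requires google-api-core < 1.26.0",
#     )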
+@pytest.mark.parametrize(
+ "transport_class,grpc_helpers",
+ [
+ (transports.DataTransferServiceGrpcTransport, grpc_helpers),
+ (transports.DataTransferServiceGrpcAsyncIOTransport, grpc_helpers_async),
+ ],
+)
+@requires_api_core_gte_1_26_0
+def test_data_transfer_service_transport_create_channel(transport_class, grpc_helpers):
+ # If credentials and host are not provided, the transport class should use
+ # ADC credentials.
+ with mock.patch.object(
+ google.auth, "default", autospec=True
+ ) as adc, mock.patch.object(
+ grpc_helpers, "create_channel", autospec=True
+ ) as create_channel:
+ creds = ga_credentials.AnonymousCredentials()
+ adc.return_value = (creds, None)
+ transport_class(quota_project_id="octopus", scopes=["1", "2"])
+
+ create_channel.assert_called_with(
+ "bigquerydatatransfer.googleapis.com:443",
+ credentials=creds,
+ credentials_file=None,
+ quota_project_id="octopus",
+ default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
+ scopes=["1", "2"],
+ default_host="bigquerydatatransfer.googleapis.com",
+ ssl_credentials=None,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+
+
+@pytest.mark.parametrize(
+ "transport_class,grpc_helpers",
+ [
+ (transports.DataTransferServiceGrpcTransport, grpc_helpers),
+ (transports.DataTransferServiceGrpcAsyncIOTransport, grpc_helpers_async),
+ ],
+)
+@requires_api_core_lt_1_26_0
+def test_data_transfer_service_transport_create_channel_old_api_core(
+ transport_class, grpc_helpers
+):
+ # If credentials and host are not provided, the transport class should use
+ # ADC credentials.
+ with mock.patch.object(
+ google.auth, "default", autospec=True
+ ) as adc, mock.patch.object(
+ grpc_helpers, "create_channel", autospec=True
+ ) as create_channel:
+ creds = ga_credentials.AnonymousCredentials()
+ adc.return_value = (creds, None)
+ transport_class(quota_project_id="octopus")
+
+ create_channel.assert_called_with(
+ "bigquerydatatransfer.googleapis.com:443",
+ credentials=creds,
+ credentials_file=None,
+ quota_project_id="octopus",
+ scopes=("https://www.googleapis.com/auth/cloud-platform",),
+ ssl_credentials=None,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+
+
+@pytest.mark.parametrize(
+ "transport_class,grpc_helpers",
+ [
+ (transports.DataTransferServiceGrpcTransport, grpc_helpers),
+ (transports.DataTransferServiceGrpcAsyncIOTransport, grpc_helpers_async),
+ ],
+)
+@requires_api_core_lt_1_26_0
+def test_data_transfer_service_transport_create_channel_user_scopes(
+ transport_class, grpc_helpers
+):
+ # If credentials and host are not provided, the transport class should use
+ # ADC credentials.
+ with mock.patch.object(
+ google.auth, "default", autospec=True
+ ) as adc, mock.patch.object(
+ grpc_helpers, "create_channel", autospec=True
+ ) as create_channel:
+ creds = ga_credentials.AnonymousCredentials()
+ adc.return_value = (creds, None)
+
+ transport_class(quota_project_id="octopus", scopes=["1", "2"])
+
+ create_channel.assert_called_with(
+ "bigquerydatatransfer.googleapis.com:443",
+ credentials=creds,
+ credentials_file=None,
+ quota_project_id="octopus",
+ scopes=["1", "2"],
+ ssl_credentials=None,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+
+
@pytest.mark.parametrize(
"transport_class",
[
@@ -4764,7 +4796,7 @@ def test_data_transfer_service_transport_auth_adc():
def test_data_transfer_service_grpc_transport_client_cert_source_for_mtls(
transport_class,
):
- cred = credentials.AnonymousCredentials()
+ cred = ga_credentials.AnonymousCredentials()
# Check ssl_channel_credentials is used if provided.
with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
@@ -4803,7 +4835,7 @@ def test_data_transfer_service_grpc_transport_client_cert_source_for_mtls(
def test_data_transfer_service_host_no_port():
client = DataTransferServiceClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
client_options=client_options.ClientOptions(
api_endpoint="bigquerydatatransfer.googleapis.com"
),
@@ -4813,7 +4845,7 @@ def test_data_transfer_service_host_no_port():
def test_data_transfer_service_host_with_port():
client = DataTransferServiceClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
client_options=client_options.ClientOptions(
api_endpoint="bigquerydatatransfer.googleapis.com:8000"
),
@@ -4869,9 +4901,9 @@ def test_data_transfer_service_transport_channel_mtls_with_client_cert_source(
mock_grpc_channel = mock.Mock()
grpc_create_channel.return_value = mock_grpc_channel
- cred = credentials.AnonymousCredentials()
+ cred = ga_credentials.AnonymousCredentials()
with pytest.warns(DeprecationWarning):
- with mock.patch.object(auth, "default") as adc:
+ with mock.patch.object(google.auth, "default") as adc:
adc.return_value = (cred, None)
transport = transport_class(
host="squid.clam.whelk",
@@ -4948,7 +4980,6 @@ def test_data_transfer_service_transport_channel_mtls_with_adc(transport_class):
def test_data_source_path():
project = "squid"
data_source = "clam"
-
expected = "projects/{project}/dataSources/{data_source}".format(
project=project, data_source=data_source,
)
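# Annotation (not part of the patch): the *_path tests in this region all
# exercise the same round trip: format a resource name from its
# components, then parse it back. A self-contained sketch mirroring the
# generated helpers (an assumption, for illustration):
#
#     import re
#
#     def data_source_path(project, data_source):
#         return "projects/{project}/dataSources/{data_source}".format(
#             project=project, data_source=data_source,
#         )
#
#     def parse_data_source_path(path):
#         m = re.match(
#             r"^projects/(?P<project>.+?)/dataSources/(?P<data_source>.+?)$", path
#         )
#         return m.groupdict() if m else {}
#
#     assert parse_data_source_path(data_source_path("squid", "clam")) == {
#         "project": "squid",
#         "data_source": "clam",
#     }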
@@ -4972,7 +5003,6 @@ def test_run_path():
project = "oyster"
transfer_config = "nudibranch"
run = "cuttlefish"
-
expected = "projects/{project}/transferConfigs/{transfer_config}/runs/{run}".format(
project=project, transfer_config=transfer_config, run=run,
)
@@ -4996,7 +5026,6 @@ def test_parse_run_path():
def test_transfer_config_path():
project = "scallop"
transfer_config = "abalone"
-
expected = "projects/{project}/transferConfigs/{transfer_config}".format(
project=project, transfer_config=transfer_config,
)
@@ -5018,7 +5047,6 @@ def test_parse_transfer_config_path():
def test_common_billing_account_path():
billing_account = "whelk"
-
expected = "billingAccounts/{billing_account}".format(
billing_account=billing_account,
)
@@ -5039,7 +5067,6 @@ def test_parse_common_billing_account_path():
def test_common_folder_path():
folder = "oyster"
-
expected = "folders/{folder}".format(folder=folder,)
actual = DataTransferServiceClient.common_folder_path(folder)
assert expected == actual
@@ -5058,7 +5085,6 @@ def test_parse_common_folder_path():
def test_common_organization_path():
organization = "cuttlefish"
-
expected = "organizations/{organization}".format(organization=organization,)
actual = DataTransferServiceClient.common_organization_path(organization)
assert expected == actual
@@ -5077,7 +5103,6 @@ def test_parse_common_organization_path():
def test_common_project_path():
project = "winkle"
-
expected = "projects/{project}".format(project=project,)
actual = DataTransferServiceClient.common_project_path(project)
assert expected == actual
@@ -5097,7 +5122,6 @@ def test_parse_common_project_path():
def test_common_location_path():
project = "scallop"
location = "abalone"
-
expected = "projects/{project}/locations/{location}".format(
project=project, location=location,
)
@@ -5124,7 +5148,7 @@ def test_client_withDEFAULT_CLIENT_INFO():
transports.DataTransferServiceTransport, "_prep_wrapped_messages"
) as prep:
client = DataTransferServiceClient(
- credentials=credentials.AnonymousCredentials(), client_info=client_info,
+ credentials=ga_credentials.AnonymousCredentials(), client_info=client_info,
)
prep.assert_called_once_with(client_info)
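# Annotation (not part of the patch): client_info here carries the
# user-agent metadata that _prep_wrapped_messages bakes into each wrapped
# method. A hedged construction sketch (an assumption; the version string
# is illustrative):
#
#     from google.api_core.gapic_v1 import client_info as gapic_client_info
#
#     client_info = gapic_client_info.ClientInfo(client_library_version="0.0.0")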
@@ -5133,6 +5157,6 @@ def test_client_withDEFAULT_CLIENT_INFO():
) as prep:
transport_class = DataTransferServiceClient.get_transport_class()
transport = transport_class(
- credentials=credentials.AnonymousCredentials(), client_info=client_info,
+ credentials=ga_credentials.AnonymousCredentials(), client_info=client_info,
)
prep.assert_called_once_with(client_info)