diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml
index a9fcd07..7d98291 100644
--- a/.github/.OwlBot.lock.yaml
+++ b/.github/.OwlBot.lock.yaml
@@ -1,3 +1,3 @@
docker:
- image: gcr.io/repo-automation-bots/owlbot-python:latest
- digest: sha256:9743664022bd63a8084be67f144898314c7ca12f0a03e422ac17c733c129d803
+ image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest
+ digest: sha256:58f73ba196b5414782605236dd0712a73541b44ff2ff4d3a36ec41092dd6fa5b
diff --git a/.github/.OwlBot.yaml b/.github/.OwlBot.yaml
index cbbeff4..dafdf88 100644
--- a/.github/.OwlBot.yaml
+++ b/.github/.OwlBot.yaml
@@ -13,7 +13,7 @@
# limitations under the License.
docker:
- image: gcr.io/repo-automation-bots/owlbot-python:latest
+ image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest
deep-remove-regex:
- /owl-bot-staging
diff --git a/.kokoro/samples/lint/common.cfg b/.kokoro/samples/lint/common.cfg
index dd42322..bdb0c90 100644
--- a/.kokoro/samples/lint/common.cfg
+++ b/.kokoro/samples/lint/common.cfg
@@ -31,4 +31,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
# Use the trampoline script to run in docker.
-build_file: "python-datastream/.kokoro/trampoline.sh"
\ No newline at end of file
+build_file: "python-datastream/.kokoro/trampoline_v2.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.10/common.cfg b/.kokoro/samples/python3.10/common.cfg
new file mode 100644
index 0000000..0faefa5
--- /dev/null
+++ b/.kokoro/samples/python3.10/common.cfg
@@ -0,0 +1,40 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Specify which tests to run
+env_vars: {
+ key: "RUN_TESTS_SESSION"
+ value: "py-3.10"
+}
+
+# Declare build specific Cloud project.
+env_vars: {
+ key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+ value: "python-docs-samples-tests-310"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-datastream/.kokoro/test-samples.sh"
+}
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
+}
+
+# Download secrets for samples
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-datastream/.kokoro/trampoline_v2.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.10/continuous.cfg b/.kokoro/samples/python3.10/continuous.cfg
new file mode 100644
index 0000000..a1c8d97
--- /dev/null
+++ b/.kokoro/samples/python3.10/continuous.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.10/periodic-head.cfg b/.kokoro/samples/python3.10/periodic-head.cfg
new file mode 100644
index 0000000..5cfc837
--- /dev/null
+++ b/.kokoro/samples/python3.10/periodic-head.cfg
@@ -0,0 +1,11 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-datastream/.kokoro/test-samples-against-head.sh"
+}
diff --git a/.kokoro/samples/python3.10/periodic.cfg b/.kokoro/samples/python3.10/periodic.cfg
new file mode 100644
index 0000000..71cd1e5
--- /dev/null
+++ b/.kokoro/samples/python3.10/periodic.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "False"
+}
diff --git a/.kokoro/samples/python3.10/presubmit.cfg b/.kokoro/samples/python3.10/presubmit.cfg
new file mode 100644
index 0000000..a1c8d97
--- /dev/null
+++ b/.kokoro/samples/python3.10/presubmit.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.6/common.cfg b/.kokoro/samples/python3.6/common.cfg
index d736adb..6d0cb6f 100644
--- a/.kokoro/samples/python3.6/common.cfg
+++ b/.kokoro/samples/python3.6/common.cfg
@@ -37,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
# Use the trampoline script to run in docker.
-build_file: "python-datastream/.kokoro/trampoline.sh"
\ No newline at end of file
+build_file: "python-datastream/.kokoro/trampoline_v2.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.6/periodic.cfg b/.kokoro/samples/python3.6/periodic.cfg
index 50fec96..71cd1e5 100644
--- a/.kokoro/samples/python3.6/periodic.cfg
+++ b/.kokoro/samples/python3.6/periodic.cfg
@@ -3,4 +3,4 @@
env_vars: {
key: "INSTALL_LIBRARY_FROM_SOURCE"
value: "False"
-}
\ No newline at end of file
+}
diff --git a/.kokoro/samples/python3.7/common.cfg b/.kokoro/samples/python3.7/common.cfg
index 37de52d..609d8b6 100644
--- a/.kokoro/samples/python3.7/common.cfg
+++ b/.kokoro/samples/python3.7/common.cfg
@@ -37,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
# Use the trampoline script to run in docker.
-build_file: "python-datastream/.kokoro/trampoline.sh"
\ No newline at end of file
+build_file: "python-datastream/.kokoro/trampoline_v2.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.7/periodic.cfg b/.kokoro/samples/python3.7/periodic.cfg
index 50fec96..71cd1e5 100644
--- a/.kokoro/samples/python3.7/periodic.cfg
+++ b/.kokoro/samples/python3.7/periodic.cfg
@@ -3,4 +3,4 @@
env_vars: {
key: "INSTALL_LIBRARY_FROM_SOURCE"
value: "False"
-}
\ No newline at end of file
+}
diff --git a/.kokoro/samples/python3.8/common.cfg b/.kokoro/samples/python3.8/common.cfg
index ad405ba..c1588e8 100644
--- a/.kokoro/samples/python3.8/common.cfg
+++ b/.kokoro/samples/python3.8/common.cfg
@@ -37,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
# Use the trampoline script to run in docker.
-build_file: "python-datastream/.kokoro/trampoline.sh"
\ No newline at end of file
+build_file: "python-datastream/.kokoro/trampoline_v2.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.8/periodic.cfg b/.kokoro/samples/python3.8/periodic.cfg
index 50fec96..71cd1e5 100644
--- a/.kokoro/samples/python3.8/periodic.cfg
+++ b/.kokoro/samples/python3.8/periodic.cfg
@@ -3,4 +3,4 @@
env_vars: {
key: "INSTALL_LIBRARY_FROM_SOURCE"
value: "False"
-}
\ No newline at end of file
+}
diff --git a/.kokoro/samples/python3.9/common.cfg b/.kokoro/samples/python3.9/common.cfg
index 2c2337e..0bb7e1f 100644
--- a/.kokoro/samples/python3.9/common.cfg
+++ b/.kokoro/samples/python3.9/common.cfg
@@ -37,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
# Use the trampoline script to run in docker.
-build_file: "python-datastream/.kokoro/trampoline.sh"
\ No newline at end of file
+build_file: "python-datastream/.kokoro/trampoline_v2.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.9/periodic.cfg b/.kokoro/samples/python3.9/periodic.cfg
index 50fec96..71cd1e5 100644
--- a/.kokoro/samples/python3.9/periodic.cfg
+++ b/.kokoro/samples/python3.9/periodic.cfg
@@ -3,4 +3,4 @@
env_vars: {
key: "INSTALL_LIBRARY_FROM_SOURCE"
value: "False"
-}
\ No newline at end of file
+}
diff --git a/.kokoro/test-samples-against-head.sh b/.kokoro/test-samples-against-head.sh
index 643fcbe..ba3a707 100755
--- a/.kokoro/test-samples-against-head.sh
+++ b/.kokoro/test-samples-against-head.sh
@@ -23,6 +23,4 @@ set -eo pipefail
# Enables `**` to include files nested inside sub-folders
shopt -s globstar
-cd github/python-datastream
-
exec .kokoro/test-samples-impl.sh
diff --git a/.kokoro/test-samples.sh b/.kokoro/test-samples.sh
index 58df11d..11c042d 100755
--- a/.kokoro/test-samples.sh
+++ b/.kokoro/test-samples.sh
@@ -24,8 +24,6 @@ set -eo pipefail
# Enables `**` to include files nested inside sub-folders
shopt -s globstar
-cd github/python-datastream
-
# Run periodic samples tests at latest release
if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
# preserving the test runner implementation.
diff --git a/.repo-metadata.json b/.repo-metadata.json
index d94d138..0e237eb 100644
--- a/.repo-metadata.json
+++ b/.repo-metadata.json
@@ -9,5 +9,7 @@
"library_type": "GAPIC_AUTO",
"repo": "googleapis/python-datastream",
"distribution_name": "google-cloud-datastream",
- "api_id": "datastream.googleapis.com"
-}
\ No newline at end of file
+ "api_id": "datastream.googleapis.com",
+ "default_version": "v1alpha1",
+ "codeowner_team": ""
+}
diff --git a/.trampolinerc b/.trampolinerc
index 383b6ec..0eee72a 100644
--- a/.trampolinerc
+++ b/.trampolinerc
@@ -16,15 +16,26 @@
# Add required env vars here.
required_envvars+=(
- "STAGING_BUCKET"
- "V2_STAGING_BUCKET"
)
# Add env vars which are passed down into the container here.
pass_down_envvars+=(
+ "NOX_SESSION"
+ ###############
+ # Docs builds
+ ###############
"STAGING_BUCKET"
"V2_STAGING_BUCKET"
- "NOX_SESSION"
+ ##################
+ # Samples builds
+ ##################
+ "INSTALL_LIBRARY_FROM_SOURCE"
+ "RUN_TESTS_SESSION"
+ "BUILD_SPECIFIC_GCLOUD_PROJECT"
+ # Target directories.
+ "RUN_TESTS_DIRS"
+  # The nox session to run is RUN_TESTS_SESSION,
+  # already listed above under "Samples builds".
)
# Prevent unintentional override on the default image.
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 7461211..4923993 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,18 @@
# Changelog
+## [0.2.0](https://www.github.com/googleapis/python-datastream/compare/v0.1.3...v0.2.0) (2021-10-08)
+
+
+### Features
+
+* add context manager support in client ([#35](https://www.github.com/googleapis/python-datastream/issues/35)) ([fa36978](https://www.github.com/googleapis/python-datastream/commit/fa369789687993fff0359f22110951393c849e70))
+
+
+### Bug Fixes
+
+* add 'dict' annotation type to 'request' ([973c851](https://www.github.com/googleapis/python-datastream/commit/973c851b750768b8405c97d33ed4cfdd66d39d9a))
+* improper types in pagers generation ([09eaafd](https://www.github.com/googleapis/python-datastream/commit/09eaafd1b695b10bfc2bb212974eff11da76782c))
+
### [0.1.3](https://www.github.com/googleapis/python-datastream/compare/v0.1.2...v0.1.3) (2021-08-30)
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index c5860ea..e0696f4 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -22,7 +22,7 @@ In order to add a feature:
documentation.
- The feature must work fully on the following CPython versions:
- 3.6, 3.7, 3.8 and 3.9 on both UNIX and Windows.
+ 3.6, 3.7, 3.8, 3.9 and 3.10 on both UNIX and Windows.
- The feature must not add unnecessary dependencies (where
"unnecessary" is of course subjective, but new dependencies should
@@ -72,7 +72,7 @@ We use `nox `__ to instrument our tests.
- To run a single unit test::
- $ nox -s unit-3.9 -- -k
+ $ nox -s unit-3.10 -- -k
.. note::
@@ -113,9 +113,9 @@ Coding Style
export GOOGLE_CLOUD_TESTING_BRANCH="main"
By doing this, you are specifying the location of the most up-to-date
- version of ``python-datastream``. The the suggested remote name ``upstream``
- should point to the official ``googleapis`` checkout and the
- the branch should be the main branch on that remote (``main``).
+ version of ``python-datastream``. The
+ remote name ``upstream`` should point to the official ``googleapis``
+ checkout and the branch should be the default branch on that remote (``main``).
- This repository contains configuration for the
`pre-commit `__ tool, which automates checking
@@ -225,11 +225,13 @@ We support:
- `Python 3.7`_
- `Python 3.8`_
- `Python 3.9`_
+- `Python 3.10`_
.. _Python 3.6: https://docs.python.org/3.6/
.. _Python 3.7: https://docs.python.org/3.7/
.. _Python 3.8: https://docs.python.org/3.8/
.. _Python 3.9: https://docs.python.org/3.9/
+.. _Python 3.10: https://docs.python.org/3.10/
Supported versions can be found in our ``noxfile.py`` `config`_.
diff --git a/README.rst b/README.rst
index 17a8b00..4bbcea2 100644
--- a/README.rst
+++ b/README.rst
@@ -9,7 +9,7 @@ Python Client for Datastream
- `Product Documentation`_
.. |alpha| image:: https://img.shields.io/badge/support-alpha-orange.svg
- :target: https://github.com/googleapis/google-cloud-python/blob/master/README.rst#beta-support
+ :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#beta-support
.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-datastream.svg
:target: https://pypi.org/project/google-cloud-datastream/
.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-datastream.svg
@@ -79,4 +79,4 @@ Next Steps
APIs that we cover.
.. _Datastream Product documentation: https://cloud.google.com/datastream/docs
-.. _README: https://github.com/googleapis/google-cloud-python/blob/master/README.rst
+.. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst
diff --git a/docs/conf.py b/docs/conf.py
index c59dd39..17393f0 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -76,8 +76,8 @@
# The encoding of source files.
# source_encoding = 'utf-8-sig'
-# The master toctree document.
-master_doc = "index"
+# The root toctree document.
+root_doc = "index"
# General information about the project.
project = "google-cloud-datastream"
@@ -280,7 +280,7 @@
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(
- master_doc,
+ root_doc,
"google-cloud-datastream.tex",
"google-cloud-datastream Documentation",
author,
@@ -315,7 +315,7 @@
# (source start file, name, description, authors, manual section).
man_pages = [
(
- master_doc,
+ root_doc,
"google-cloud-datastream",
"google-cloud-datastream Documentation",
[author],
@@ -334,7 +334,7 @@
# dir menu entry, description, category)
texinfo_documents = [
(
- master_doc,
+ root_doc,
"google-cloud-datastream",
"google-cloud-datastream Documentation",
author,
diff --git a/google/cloud/datastream_v1alpha1/services/datastream/async_client.py b/google/cloud/datastream_v1alpha1/services/datastream/async_client.py
index 8096f6c..5c8f568 100644
--- a/google/cloud/datastream_v1alpha1/services/datastream/async_client.py
+++ b/google/cloud/datastream_v1alpha1/services/datastream/async_client.py
@@ -1922,6 +1922,12 @@ async def delete_route(
# Done; return the response.
return response
+ async def __aenter__(self):
+ return self
+
+ async def __aexit__(self, exc_type, exc, tb):
+ await self.transport.close()
+
try:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
diff --git a/google/cloud/datastream_v1alpha1/services/datastream/client.py b/google/cloud/datastream_v1alpha1/services/datastream/client.py
index 870fecf..07af2d3 100644
--- a/google/cloud/datastream_v1alpha1/services/datastream/client.py
+++ b/google/cloud/datastream_v1alpha1/services/datastream/client.py
@@ -17,7 +17,7 @@
from distutils import util
import os
import re
-from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union
+from typing import Dict, Optional, Sequence, Tuple, Type, Union
import pkg_resources
from google.api_core import client_options as client_options_lib # type: ignore
@@ -405,15 +405,12 @@ def __init__(
client_cert_source_for_mtls=client_cert_source_func,
quota_project_id=client_options.quota_project_id,
client_info=client_info,
- always_use_jwt_access=(
- Transport == type(self).get_transport_class("grpc")
- or Transport == type(self).get_transport_class("grpc_asyncio")
- ),
+ always_use_jwt_access=True,
)
def list_connection_profiles(
self,
- request: datastream.ListConnectionProfilesRequest = None,
+ request: Union[datastream.ListConnectionProfilesRequest, dict] = None,
*,
parent: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
@@ -424,7 +421,7 @@ def list_connection_profiles(
in a project and location.
Args:
- request (google.cloud.datastream_v1alpha1.types.ListConnectionProfilesRequest):
+ request (Union[google.cloud.datastream_v1alpha1.types.ListConnectionProfilesRequest, dict]):
The request object.
parent (str):
Required. The parent that owns the
@@ -491,7 +488,7 @@ def list_connection_profiles(
def get_connection_profile(
self,
- request: datastream.GetConnectionProfileRequest = None,
+ request: Union[datastream.GetConnectionProfileRequest, dict] = None,
*,
name: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
@@ -502,7 +499,7 @@ def get_connection_profile(
profile.
Args:
- request (google.cloud.datastream_v1alpha1.types.GetConnectionProfileRequest):
+ request (Union[google.cloud.datastream_v1alpha1.types.GetConnectionProfileRequest, dict]):
The request object.
name (str):
Required. The name of the connection
@@ -560,7 +557,7 @@ def get_connection_profile(
def create_connection_profile(
self,
- request: datastream.CreateConnectionProfileRequest = None,
+ request: Union[datastream.CreateConnectionProfileRequest, dict] = None,
*,
parent: str = None,
connection_profile: datastream_resources.ConnectionProfile = None,
@@ -573,7 +570,7 @@ def create_connection_profile(
project and location.
Args:
- request (google.cloud.datastream_v1alpha1.types.CreateConnectionProfileRequest):
+ request (Union[google.cloud.datastream_v1alpha1.types.CreateConnectionProfileRequest, dict]):
The request object.
parent (str):
Required. The parent that owns the
@@ -663,7 +660,7 @@ def create_connection_profile(
def update_connection_profile(
self,
- request: datastream.UpdateConnectionProfileRequest = None,
+ request: Union[datastream.UpdateConnectionProfileRequest, dict] = None,
*,
connection_profile: datastream_resources.ConnectionProfile = None,
update_mask: field_mask_pb2.FieldMask = None,
@@ -675,7 +672,7 @@ def update_connection_profile(
connection profile.
Args:
- request (google.cloud.datastream_v1alpha1.types.UpdateConnectionProfileRequest):
+ request (Union[google.cloud.datastream_v1alpha1.types.UpdateConnectionProfileRequest, dict]):
The request object.
connection_profile (google.cloud.datastream_v1alpha1.types.ConnectionProfile):
Required. The ConnectionProfile to
@@ -763,7 +760,7 @@ def update_connection_profile(
def delete_connection_profile(
self,
- request: datastream.DeleteConnectionProfileRequest = None,
+ request: Union[datastream.DeleteConnectionProfileRequest, dict] = None,
*,
name: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
@@ -773,7 +770,7 @@ def delete_connection_profile(
r"""Use this method to delete a connection profile..
Args:
- request (google.cloud.datastream_v1alpha1.types.DeleteConnectionProfileRequest):
+ request (Union[google.cloud.datastream_v1alpha1.types.DeleteConnectionProfileRequest, dict]):
The request object.
name (str):
Required. The name of the connection
@@ -856,7 +853,7 @@ def delete_connection_profile(
def discover_connection_profile(
self,
- request: datastream.DiscoverConnectionProfileRequest = None,
+ request: Union[datastream.DiscoverConnectionProfileRequest, dict] = None,
*,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
@@ -869,7 +866,7 @@ def discover_connection_profile(
that's optionally supplied in the request.
Args:
- request (google.cloud.datastream_v1alpha1.types.DiscoverConnectionProfileRequest):
+ request (Union[google.cloud.datastream_v1alpha1.types.DiscoverConnectionProfileRequest, dict]):
The request object. Request message for 'discover'
ConnectionProfile request.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
@@ -910,7 +907,7 @@ def discover_connection_profile(
def list_streams(
self,
- request: datastream.ListStreamsRequest = None,
+ request: Union[datastream.ListStreamsRequest, dict] = None,
*,
parent: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
@@ -921,7 +918,7 @@ def list_streams(
location.
Args:
- request (google.cloud.datastream_v1alpha1.types.ListStreamsRequest):
+ request (Union[google.cloud.datastream_v1alpha1.types.ListStreamsRequest, dict]):
The request object.
parent (str):
Required. The parent that owns the
@@ -988,7 +985,7 @@ def list_streams(
def get_stream(
self,
- request: datastream.GetStreamRequest = None,
+ request: Union[datastream.GetStreamRequest, dict] = None,
*,
name: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
@@ -998,7 +995,7 @@ def get_stream(
r"""Use this method to get details about a stream.
Args:
- request (google.cloud.datastream_v1alpha1.types.GetStreamRequest):
+ request (Union[google.cloud.datastream_v1alpha1.types.GetStreamRequest, dict]):
The request object.
name (str):
Required. The name of the stream
@@ -1056,7 +1053,7 @@ def get_stream(
def create_stream(
self,
- request: datastream.CreateStreamRequest = None,
+ request: Union[datastream.CreateStreamRequest, dict] = None,
*,
parent: str = None,
stream: datastream_resources.Stream = None,
@@ -1068,7 +1065,7 @@ def create_stream(
r"""Use this method to create a stream.
Args:
- request (google.cloud.datastream_v1alpha1.types.CreateStreamRequest):
+ request (Union[google.cloud.datastream_v1alpha1.types.CreateStreamRequest, dict]):
The request object.
parent (str):
Required. The parent that owns the
@@ -1154,7 +1151,7 @@ def create_stream(
def update_stream(
self,
- request: datastream.UpdateStreamRequest = None,
+ request: Union[datastream.UpdateStreamRequest, dict] = None,
*,
stream: datastream_resources.Stream = None,
update_mask: field_mask_pb2.FieldMask = None,
@@ -1166,7 +1163,7 @@ def update_stream(
stream.
Args:
- request (google.cloud.datastream_v1alpha1.types.UpdateStreamRequest):
+ request (Union[google.cloud.datastream_v1alpha1.types.UpdateStreamRequest, dict]):
The request object.
stream (google.cloud.datastream_v1alpha1.types.Stream):
Required. The stream resource to
@@ -1251,7 +1248,7 @@ def update_stream(
def delete_stream(
self,
- request: datastream.DeleteStreamRequest = None,
+ request: Union[datastream.DeleteStreamRequest, dict] = None,
*,
name: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
@@ -1261,7 +1258,7 @@ def delete_stream(
r"""Use this method to delete a stream.
Args:
- request (google.cloud.datastream_v1alpha1.types.DeleteStreamRequest):
+ request (Union[google.cloud.datastream_v1alpha1.types.DeleteStreamRequest, dict]):
The request object.
name (str):
Required. The name of the stream
@@ -1342,7 +1339,7 @@ def delete_stream(
def fetch_errors(
self,
- request: datastream.FetchErrorsRequest = None,
+ request: Union[datastream.FetchErrorsRequest, dict] = None,
*,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
@@ -1352,7 +1349,7 @@ def fetch_errors(
stream.
Args:
- request (google.cloud.datastream_v1alpha1.types.FetchErrorsRequest):
+ request (Union[google.cloud.datastream_v1alpha1.types.FetchErrorsRequest, dict]):
The request object. Request message for 'FetchErrors'
request.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
@@ -1404,7 +1401,7 @@ def fetch_errors(
def fetch_static_ips(
self,
- request: datastream.FetchStaticIpsRequest = None,
+ request: Union[datastream.FetchStaticIpsRequest, dict] = None,
*,
name: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
@@ -1417,7 +1414,7 @@ def fetch_static_ips(
optionally supplied in the request.
Args:
- request (google.cloud.datastream_v1alpha1.types.FetchStaticIpsRequest):
+ request (Union[google.cloud.datastream_v1alpha1.types.FetchStaticIpsRequest, dict]):
The request object. Request message for 'FetchStaticIps'
request.
name (str):
@@ -1487,7 +1484,7 @@ def fetch_static_ips(
def create_private_connection(
self,
- request: datastream.CreatePrivateConnectionRequest = None,
+ request: Union[datastream.CreatePrivateConnectionRequest, dict] = None,
*,
parent: str = None,
private_connection: datastream_resources.PrivateConnection = None,
@@ -1500,7 +1497,7 @@ def create_private_connection(
configuration.
Args:
- request (google.cloud.datastream_v1alpha1.types.CreatePrivateConnectionRequest):
+ request (Union[google.cloud.datastream_v1alpha1.types.CreatePrivateConnectionRequest, dict]):
The request object.
parent (str):
Required. The parent that owns the
@@ -1590,7 +1587,7 @@ def create_private_connection(
def get_private_connection(
self,
- request: datastream.GetPrivateConnectionRequest = None,
+ request: Union[datastream.GetPrivateConnectionRequest, dict] = None,
*,
name: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
@@ -1601,7 +1598,7 @@ def get_private_connection(
connectivity configuration.
Args:
- request (google.cloud.datastream_v1alpha1.types.GetPrivateConnectionRequest):
+ request (Union[google.cloud.datastream_v1alpha1.types.GetPrivateConnectionRequest, dict]):
The request object.
name (str):
Required. The name of the private
@@ -1663,7 +1660,7 @@ def get_private_connection(
def list_private_connections(
self,
- request: datastream.ListPrivateConnectionsRequest = None,
+ request: Union[datastream.ListPrivateConnectionsRequest, dict] = None,
*,
parent: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
@@ -1674,7 +1671,7 @@ def list_private_connections(
configurations in a project and location.
Args:
- request (google.cloud.datastream_v1alpha1.types.ListPrivateConnectionsRequest):
+ request (Union[google.cloud.datastream_v1alpha1.types.ListPrivateConnectionsRequest, dict]):
The request object.
parent (str):
Required. The parent that owns the
@@ -1742,7 +1739,7 @@ def list_private_connections(
def delete_private_connection(
self,
- request: datastream.DeletePrivateConnectionRequest = None,
+ request: Union[datastream.DeletePrivateConnectionRequest, dict] = None,
*,
name: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
@@ -1753,7 +1750,7 @@ def delete_private_connection(
configuration.
Args:
- request (google.cloud.datastream_v1alpha1.types.DeletePrivateConnectionRequest):
+ request (Union[google.cloud.datastream_v1alpha1.types.DeletePrivateConnectionRequest, dict]):
The request object.
name (str):
Required. The name of the private
@@ -1836,7 +1833,7 @@ def delete_private_connection(
def create_route(
self,
- request: datastream.CreateRouteRequest = None,
+ request: Union[datastream.CreateRouteRequest, dict] = None,
*,
parent: str = None,
route: datastream_resources.Route = None,
@@ -1849,7 +1846,7 @@ def create_route(
connectivity in a project and location.
Args:
- request (google.cloud.datastream_v1alpha1.types.CreateRouteRequest):
+ request (Union[google.cloud.datastream_v1alpha1.types.CreateRouteRequest, dict]):
The request object. route creation request
parent (str):
Required. The parent that owns the
@@ -1936,7 +1933,7 @@ def create_route(
def get_route(
self,
- request: datastream.GetRouteRequest = None,
+ request: Union[datastream.GetRouteRequest, dict] = None,
*,
name: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
@@ -1946,7 +1943,7 @@ def get_route(
r"""Use this method to get details about a route.
Args:
- request (google.cloud.datastream_v1alpha1.types.GetRouteRequest):
+ request (Union[google.cloud.datastream_v1alpha1.types.GetRouteRequest, dict]):
The request object. route get request
name (str):
Required. The name of the Route
@@ -2008,7 +2005,7 @@ def get_route(
def list_routes(
self,
- request: datastream.ListRoutesRequest = None,
+ request: Union[datastream.ListRoutesRequest, dict] = None,
*,
parent: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
@@ -2019,7 +2016,7 @@ def list_routes(
connectivity in a project and location.
Args:
- request (google.cloud.datastream_v1alpha1.types.ListRoutesRequest):
+ request (Union[google.cloud.datastream_v1alpha1.types.ListRoutesRequest, dict]):
The request object. route list request
parent (str):
Required. The parent that owns the
@@ -2087,7 +2084,7 @@ def list_routes(
def delete_route(
self,
- request: datastream.DeleteRouteRequest = None,
+ request: Union[datastream.DeleteRouteRequest, dict] = None,
*,
name: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
@@ -2097,7 +2094,7 @@ def delete_route(
r"""Use this method to delete a route.
Args:
- request (google.cloud.datastream_v1alpha1.types.DeleteRouteRequest):
+ request (Union[google.cloud.datastream_v1alpha1.types.DeleteRouteRequest, dict]):
The request object. route deletion request
name (str):
Required. The name of the Route
@@ -2176,6 +2173,19 @@ def delete_route(
# Done; return the response.
return response
+ def __enter__(self):
+ return self
+
+ def __exit__(self, type, value, traceback):
+ """Releases underlying transport's resources.
+
+ .. warning::
+ ONLY use as a context manager if the transport is NOT shared
+ with other clients! Exiting the with block will CLOSE the transport
+ and may cause errors in other clients!
+ """
+ self.transport.close()
+
try:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
diff --git a/google/cloud/datastream_v1alpha1/services/datastream/pagers.py b/google/cloud/datastream_v1alpha1/services/datastream/pagers.py
index 0c14fa2..8c0e91e 100644
--- a/google/cloud/datastream_v1alpha1/services/datastream/pagers.py
+++ b/google/cloud/datastream_v1alpha1/services/datastream/pagers.py
@@ -15,13 +15,13 @@
#
from typing import (
Any,
- AsyncIterable,
+ AsyncIterator,
Awaitable,
Callable,
- Iterable,
Sequence,
Tuple,
Optional,
+ Iterator,
)
from google.cloud.datastream_v1alpha1.types import datastream
@@ -75,14 +75,14 @@ def __getattr__(self, name: str) -> Any:
return getattr(self._response, name)
@property
- def pages(self) -> Iterable[datastream.ListConnectionProfilesResponse]:
+ def pages(self) -> Iterator[datastream.ListConnectionProfilesResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
self._response = self._method(self._request, metadata=self._metadata)
yield self._response
- def __iter__(self) -> Iterable[datastream_resources.ConnectionProfile]:
+ def __iter__(self) -> Iterator[datastream_resources.ConnectionProfile]:
for page in self.pages:
yield from page.connection_profiles
@@ -137,14 +137,14 @@ def __getattr__(self, name: str) -> Any:
return getattr(self._response, name)
@property
- async def pages(self) -> AsyncIterable[datastream.ListConnectionProfilesResponse]:
+ async def pages(self) -> AsyncIterator[datastream.ListConnectionProfilesResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
self._response = await self._method(self._request, metadata=self._metadata)
yield self._response
- def __aiter__(self) -> AsyncIterable[datastream_resources.ConnectionProfile]:
+ def __aiter__(self) -> AsyncIterator[datastream_resources.ConnectionProfile]:
async def async_generator():
async for page in self.pages:
for response in page.connection_profiles:
@@ -203,14 +203,14 @@ def __getattr__(self, name: str) -> Any:
return getattr(self._response, name)
@property
- def pages(self) -> Iterable[datastream.ListStreamsResponse]:
+ def pages(self) -> Iterator[datastream.ListStreamsResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
self._response = self._method(self._request, metadata=self._metadata)
yield self._response
- def __iter__(self) -> Iterable[datastream_resources.Stream]:
+ def __iter__(self) -> Iterator[datastream_resources.Stream]:
for page in self.pages:
yield from page.streams
@@ -265,14 +265,14 @@ def __getattr__(self, name: str) -> Any:
return getattr(self._response, name)
@property
- async def pages(self) -> AsyncIterable[datastream.ListStreamsResponse]:
+ async def pages(self) -> AsyncIterator[datastream.ListStreamsResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
self._response = await self._method(self._request, metadata=self._metadata)
yield self._response
- def __aiter__(self) -> AsyncIterable[datastream_resources.Stream]:
+ def __aiter__(self) -> AsyncIterator[datastream_resources.Stream]:
async def async_generator():
async for page in self.pages:
for response in page.streams:
@@ -331,14 +331,14 @@ def __getattr__(self, name: str) -> Any:
return getattr(self._response, name)
@property
- def pages(self) -> Iterable[datastream.FetchStaticIpsResponse]:
+ def pages(self) -> Iterator[datastream.FetchStaticIpsResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
self._response = self._method(self._request, metadata=self._metadata)
yield self._response
- def __iter__(self) -> Iterable[str]:
+ def __iter__(self) -> Iterator[str]:
for page in self.pages:
yield from page.static_ips
@@ -393,14 +393,14 @@ def __getattr__(self, name: str) -> Any:
return getattr(self._response, name)
@property
- async def pages(self) -> AsyncIterable[datastream.FetchStaticIpsResponse]:
+ async def pages(self) -> AsyncIterator[datastream.FetchStaticIpsResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
self._response = await self._method(self._request, metadata=self._metadata)
yield self._response
- def __aiter__(self) -> AsyncIterable[str]:
+ def __aiter__(self) -> AsyncIterator[str]:
async def async_generator():
async for page in self.pages:
for response in page.static_ips:
@@ -459,14 +459,14 @@ def __getattr__(self, name: str) -> Any:
return getattr(self._response, name)
@property
- def pages(self) -> Iterable[datastream.ListPrivateConnectionsResponse]:
+ def pages(self) -> Iterator[datastream.ListPrivateConnectionsResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
self._response = self._method(self._request, metadata=self._metadata)
yield self._response
- def __iter__(self) -> Iterable[datastream_resources.PrivateConnection]:
+ def __iter__(self) -> Iterator[datastream_resources.PrivateConnection]:
for page in self.pages:
yield from page.private_connections
@@ -521,14 +521,14 @@ def __getattr__(self, name: str) -> Any:
return getattr(self._response, name)
@property
- async def pages(self) -> AsyncIterable[datastream.ListPrivateConnectionsResponse]:
+ async def pages(self) -> AsyncIterator[datastream.ListPrivateConnectionsResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
self._response = await self._method(self._request, metadata=self._metadata)
yield self._response
- def __aiter__(self) -> AsyncIterable[datastream_resources.PrivateConnection]:
+ def __aiter__(self) -> AsyncIterator[datastream_resources.PrivateConnection]:
async def async_generator():
async for page in self.pages:
for response in page.private_connections:
@@ -587,14 +587,14 @@ def __getattr__(self, name: str) -> Any:
return getattr(self._response, name)
@property
- def pages(self) -> Iterable[datastream.ListRoutesResponse]:
+ def pages(self) -> Iterator[datastream.ListRoutesResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
self._response = self._method(self._request, metadata=self._metadata)
yield self._response
- def __iter__(self) -> Iterable[datastream_resources.Route]:
+ def __iter__(self) -> Iterator[datastream_resources.Route]:
for page in self.pages:
yield from page.routes
@@ -649,14 +649,14 @@ def __getattr__(self, name: str) -> Any:
return getattr(self._response, name)
@property
- async def pages(self) -> AsyncIterable[datastream.ListRoutesResponse]:
+ async def pages(self) -> AsyncIterator[datastream.ListRoutesResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
self._response = await self._method(self._request, metadata=self._metadata)
yield self._response
- def __aiter__(self) -> AsyncIterable[datastream_resources.Route]:
+ def __aiter__(self) -> AsyncIterator[datastream_resources.Route]:
async def async_generator():
async for page in self.pages:
for response in page.routes:
diff --git a/google/cloud/datastream_v1alpha1/services/datastream/transports/base.py b/google/cloud/datastream_v1alpha1/services/datastream/transports/base.py
index 580dc3d..e034088 100644
--- a/google/cloud/datastream_v1alpha1/services/datastream/transports/base.py
+++ b/google/cloud/datastream_v1alpha1/services/datastream/transports/base.py
@@ -120,7 +120,7 @@ def __init__(
**scopes_kwargs, quota_project_id=quota_project_id
)
- # If the credentials is service account credentials, then always try to use self signed JWT.
+ # If the credentials are service account credentials, then always try to use self signed JWT.
if (
always_use_jwt_access
and isinstance(credentials, service_account.Credentials)
@@ -242,6 +242,15 @@ def _prep_wrapped_messages(self, client_info):
),
}
+ def close(self):
+ """Closes resources associated with the transport.
+
+ .. warning::
+ Only call this method if the transport is NOT shared
+ with other clients - this may cause errors in other clients!
+ """
+ raise NotImplementedError()
+
@property
def operations_client(self) -> operations_v1.OperationsClient:
"""Return the client designed to process long-running operations."""
diff --git a/google/cloud/datastream_v1alpha1/services/datastream/transports/grpc.py b/google/cloud/datastream_v1alpha1/services/datastream/transports/grpc.py
index 5f6f471..dd7c92c 100644
--- a/google/cloud/datastream_v1alpha1/services/datastream/transports/grpc.py
+++ b/google/cloud/datastream_v1alpha1/services/datastream/transports/grpc.py
@@ -83,16 +83,16 @@ def __init__(
api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
- ``client_cert_source`` or applicatin default SSL credentials.
+ ``client_cert_source`` or application default SSL credentials.
client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
Deprecated. A callback to provide client SSL certificate bytes and
private key bytes, both in PEM format. It is ignored if
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
- for grpc channel. It is ignored if ``channel`` is provided.
+ for the grpc channel. It is ignored if ``channel`` is provided.
client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
A callback to provide client certificate bytes and private key bytes,
- both in PEM format. It is used to configure mutual TLS channel. It is
+ both in PEM format. It is used to configure a mutual TLS channel. It is
ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
@@ -834,5 +834,8 @@ def delete_route(
)
return self._stubs["delete_route"]
+ def close(self):
+ self.grpc_channel.close()
+
__all__ = ("DatastreamGrpcTransport",)
diff --git a/google/cloud/datastream_v1alpha1/services/datastream/transports/grpc_asyncio.py b/google/cloud/datastream_v1alpha1/services/datastream/transports/grpc_asyncio.py
index d5a31cc..b2ee457 100644
--- a/google/cloud/datastream_v1alpha1/services/datastream/transports/grpc_asyncio.py
+++ b/google/cloud/datastream_v1alpha1/services/datastream/transports/grpc_asyncio.py
@@ -130,16 +130,16 @@ def __init__(
api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
- ``client_cert_source`` or applicatin default SSL credentials.
+ ``client_cert_source`` or application default SSL credentials.
client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
Deprecated. A callback to provide client SSL certificate bytes and
private key bytes, both in PEM format. It is ignored if
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
- for grpc channel. It is ignored if ``channel`` is provided.
+ for the grpc channel. It is ignored if ``channel`` is provided.
client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
A callback to provide client certificate bytes and private key bytes,
- both in PEM format. It is used to configure mutual TLS channel. It is
+ both in PEM format. It is used to configure a mutual TLS channel. It is
ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
@@ -853,5 +853,8 @@ def delete_route(
)
return self._stubs["delete_route"]
+ def close(self):
+ return self.grpc_channel.close()
+
__all__ = ("DatastreamGrpcAsyncIOTransport",)
diff --git a/google/cloud/datastream_v1alpha1/types/datastream.py b/google/cloud/datastream_v1alpha1/types/datastream.py
index 46db339..9992189 100644
--- a/google/cloud/datastream_v1alpha1/types/datastream.py
+++ b/google/cloud/datastream_v1alpha1/types/datastream.py
@@ -58,6 +58,7 @@
class DiscoverConnectionProfileRequest(proto.Message):
r"""Request message for 'discover' ConnectionProfile request.
+
Attributes:
parent (str):
Required. The parent resource of the ConnectionProfile type.
@@ -107,6 +108,7 @@ class DiscoverConnectionProfileRequest(proto.Message):
class DiscoverConnectionProfileResponse(proto.Message):
r"""
+
Attributes:
oracle_rdbms (google.cloud.datastream_v1alpha1.types.OracleRdbms):
Enriched Oracle RDBMS object.
@@ -130,6 +132,7 @@ class DiscoverConnectionProfileResponse(proto.Message):
class FetchStaticIpsRequest(proto.Message):
r"""Request message for 'FetchStaticIps' request.
+
Attributes:
name (str):
Required. The name resource of the Response type. Must be in
@@ -149,6 +152,7 @@ class FetchStaticIpsRequest(proto.Message):
class FetchStaticIpsResponse(proto.Message):
r"""Response message for a 'FetchStaticIps' response.
+
Attributes:
static_ips (Sequence[str]):
list of static ips by account
@@ -168,6 +172,7 @@ def raw_page(self):
class FetchErrorsRequest(proto.Message):
r"""Request message for 'FetchErrors' request.
+
Attributes:
stream (str):
Name of the Stream resource for which to
@@ -179,6 +184,7 @@ class FetchErrorsRequest(proto.Message):
class FetchErrorsResponse(proto.Message):
r"""Response message for a 'FetchErrors' response.
+
Attributes:
errors (Sequence[google.cloud.datastream_v1alpha1.types.Error]):
The list of errors on the Stream.
@@ -191,6 +197,7 @@ class FetchErrorsResponse(proto.Message):
class ListConnectionProfilesRequest(proto.Message):
r"""
+
Attributes:
parent (str):
Required. The parent that owns the collection
@@ -223,6 +230,7 @@ class ListConnectionProfilesRequest(proto.Message):
class ListConnectionProfilesResponse(proto.Message):
r"""
+
Attributes:
connection_profiles (Sequence[google.cloud.datastream_v1alpha1.types.ConnectionProfile]):
List of connection profiles.
@@ -247,6 +255,7 @@ def raw_page(self):
class GetConnectionProfileRequest(proto.Message):
r"""
+
Attributes:
name (str):
Required. The name of the connection profile
@@ -258,6 +267,7 @@ class GetConnectionProfileRequest(proto.Message):
class CreateConnectionProfileRequest(proto.Message):
r"""
+
Attributes:
parent (str):
Required. The parent that owns the collection
@@ -297,6 +307,7 @@ class CreateConnectionProfileRequest(proto.Message):
class UpdateConnectionProfileRequest(proto.Message):
r"""
+
Attributes:
update_mask (google.protobuf.field_mask_pb2.FieldMask):
Optional. Field mask is used to specify the fields to be
@@ -338,6 +349,7 @@ class UpdateConnectionProfileRequest(proto.Message):
class DeleteConnectionProfileRequest(proto.Message):
r"""
+
Attributes:
name (str):
Required. The name of the connection profile
@@ -368,6 +380,7 @@ class DeleteConnectionProfileRequest(proto.Message):
class ListStreamsRequest(proto.Message):
r"""
+
Attributes:
parent (str):
Required. The parent that owns the collection
@@ -399,6 +412,7 @@ class ListStreamsRequest(proto.Message):
class ListStreamsResponse(proto.Message):
r"""
+
Attributes:
streams (Sequence[google.cloud.datastream_v1alpha1.types.Stream]):
List of streams
@@ -423,6 +437,7 @@ def raw_page(self):
class GetStreamRequest(proto.Message):
r"""
+
Attributes:
name (str):
Required. The name of the stream resource to
@@ -434,6 +449,7 @@ class GetStreamRequest(proto.Message):
class CreateStreamRequest(proto.Message):
r"""
+
Attributes:
parent (str):
Required. The parent that owns the collection
@@ -478,6 +494,7 @@ class CreateStreamRequest(proto.Message):
class UpdateStreamRequest(proto.Message):
r"""
+
Attributes:
update_mask (google.protobuf.field_mask_pb2.FieldMask):
Optional. Field mask is used to specify the fields to be
@@ -526,6 +543,7 @@ class UpdateStreamRequest(proto.Message):
class DeleteStreamRequest(proto.Message):
r"""
+
Attributes:
name (str):
Required. The name of the stream resource to
@@ -556,6 +574,7 @@ class DeleteStreamRequest(proto.Message):
class OperationMetadata(proto.Message):
r"""Represents the metadata of the long-running operation.
+
Attributes:
create_time (google.protobuf.timestamp_pb2.Timestamp):
Output only. The time the operation was
@@ -600,6 +619,7 @@ class OperationMetadata(proto.Message):
class CreatePrivateConnectionRequest(proto.Message):
r"""
+
Attributes:
parent (str):
Required. The parent that owns the collection
@@ -640,6 +660,7 @@ class CreatePrivateConnectionRequest(proto.Message):
class ListPrivateConnectionsRequest(proto.Message):
r"""
+
Attributes:
parent (str):
Required. The parent that owns the collection
@@ -673,6 +694,7 @@ class ListPrivateConnectionsRequest(proto.Message):
class ListPrivateConnectionsResponse(proto.Message):
r"""
+
Attributes:
private_connections (Sequence[google.cloud.datastream_v1alpha1.types.PrivateConnection]):
List of private connectivity configurations.
@@ -697,6 +719,7 @@ def raw_page(self):
class DeletePrivateConnectionRequest(proto.Message):
r"""
+
Attributes:
name (str):
Required. The name of the private
@@ -732,6 +755,7 @@ class DeletePrivateConnectionRequest(proto.Message):
class GetPrivateConnectionRequest(proto.Message):
r"""
+
Attributes:
name (str):
Required. The name of the private
@@ -743,6 +767,7 @@ class GetPrivateConnectionRequest(proto.Message):
class CreateRouteRequest(proto.Message):
r"""route creation request
+
Attributes:
parent (str):
Required. The parent that owns the collection
@@ -779,6 +804,7 @@ class CreateRouteRequest(proto.Message):
class ListRoutesRequest(proto.Message):
r"""route list request
+
Attributes:
parent (str):
Required. The parent that owns the collection
@@ -811,6 +837,7 @@ class ListRoutesRequest(proto.Message):
class ListRoutesResponse(proto.Message):
r"""route list response
+
Attributes:
routes (Sequence[google.cloud.datastream_v1alpha1.types.Route]):
List of Routes.
@@ -835,6 +862,7 @@ def raw_page(self):
class DeleteRouteRequest(proto.Message):
r"""route deletion request
+
Attributes:
name (str):
Required. The name of the Route resource to
@@ -865,6 +893,7 @@ class DeleteRouteRequest(proto.Message):
class GetRouteRequest(proto.Message):
r"""route get request
+
Attributes:
name (str):
Required. The name of the Route resource to
diff --git a/google/cloud/datastream_v1alpha1/types/datastream_resources.py b/google/cloud/datastream_v1alpha1/types/datastream_resources.py
index 14d9bc8..670d704 100644
--- a/google/cloud/datastream_v1alpha1/types/datastream_resources.py
+++ b/google/cloud/datastream_v1alpha1/types/datastream_resources.py
@@ -76,6 +76,7 @@ class SchemaFileFormat(proto.Enum):
class OracleProfile(proto.Message):
r"""Oracle database profile.
+
Attributes:
hostname (str):
Required. Hostname for the Oracle connection.
@@ -102,6 +103,7 @@ class OracleProfile(proto.Message):
class MysqlProfile(proto.Message):
r"""MySQL database profile.
+
Attributes:
hostname (str):
Required. Hostname for the MySQL connection.
@@ -126,6 +128,7 @@ class MysqlProfile(proto.Message):
class GcsProfile(proto.Message):
r"""Cloud Storage bucket profile.
+
Attributes:
bucket_name (str):
Required. The full project and resource path
@@ -140,15 +143,18 @@ class GcsProfile(proto.Message):
class NoConnectivitySettings(proto.Message):
- r"""No connectivity settings. """
+ r"""No connectivity settings.
+ """
class StaticServiceIpConnectivity(proto.Message):
- r"""Static IP address connectivity. """
+ r"""Static IP address connectivity.
+ """
class ForwardSshTunnelConnectivity(proto.Message):
r"""Forward SSH Tunnel connectivity.
+
Attributes:
hostname (str):
Required. Hostname for the SSH tunnel.
@@ -232,6 +238,7 @@ class State(proto.Enum):
class PrivateConnectivity(proto.Message):
r"""Private Connectivity
+
Attributes:
private_connection_name (str):
@@ -273,6 +280,7 @@ class Route(proto.Message):
class MysqlSslConfig(proto.Message):
r"""MySQL SSL configuration information.
+
Attributes:
client_key (str):
Input only. PEM-encoded private key associated with the
@@ -308,6 +316,7 @@ class MysqlSslConfig(proto.Message):
class ConnectionProfile(proto.Message):
r"""
+
Attributes:
name (str):
Output only. The resource's name.
@@ -375,6 +384,7 @@ class ConnectionProfile(proto.Message):
class OracleColumn(proto.Message):
r"""Oracle Column.
+
Attributes:
column_name (str):
Column name.
@@ -412,6 +422,7 @@ class OracleColumn(proto.Message):
class OracleTable(proto.Message):
r"""Oracle table.
+
Attributes:
table_name (str):
Table name.
@@ -429,6 +440,7 @@ class OracleTable(proto.Message):
class OracleSchema(proto.Message):
r"""Oracle schema.
+
Attributes:
schema_name (str):
Schema name.
@@ -442,6 +454,7 @@ class OracleSchema(proto.Message):
class OracleRdbms(proto.Message):
r"""Oracle database structure.
+
Attributes:
oracle_schemas (Sequence[google.cloud.datastream_v1alpha1.types.OracleSchema]):
Oracle schemas/databases in the database
@@ -455,6 +468,7 @@ class OracleRdbms(proto.Message):
class OracleSourceConfig(proto.Message):
r"""Oracle data source configuration
+
Attributes:
allowlist (google.cloud.datastream_v1alpha1.types.OracleRdbms):
Oracle objects to include in the stream.
@@ -468,6 +482,7 @@ class OracleSourceConfig(proto.Message):
class MysqlColumn(proto.Message):
r"""MySQL Column.
+
Attributes:
column_name (str):
Column name.
@@ -502,6 +517,7 @@ class MysqlColumn(proto.Message):
class MysqlTable(proto.Message):
r"""MySQL table.
+
Attributes:
table_name (str):
Table name.
@@ -517,6 +533,7 @@ class MysqlTable(proto.Message):
class MysqlDatabase(proto.Message):
r"""MySQL database.
+
Attributes:
database_name (str):
Database name.
@@ -530,6 +547,7 @@ class MysqlDatabase(proto.Message):
class MysqlRdbms(proto.Message):
r"""MySQL database structure
+
Attributes:
mysql_databases (Sequence[google.cloud.datastream_v1alpha1.types.MysqlDatabase]):
Mysql databases on the server
@@ -542,6 +560,7 @@ class MysqlRdbms(proto.Message):
class MysqlSourceConfig(proto.Message):
r"""MySQL source configuration
+
Attributes:
allowlist (google.cloud.datastream_v1alpha1.types.MysqlRdbms):
MySQL objects to retrieve from the source.
@@ -555,6 +574,7 @@ class MysqlSourceConfig(proto.Message):
class SourceConfig(proto.Message):
r"""The configuration of the stream source.
+
Attributes:
source_connection_profile_name (str):
Required. Source connection profile
@@ -581,11 +601,13 @@ class SourceConfig(proto.Message):
class AvroFileFormat(proto.Message):
- r"""AVRO file format configuration. """
+ r"""AVRO file format configuration.
+ """
class JsonFileFormat(proto.Message):
r"""JSON file format configuration.
+
Attributes:
schema_file_format (google.cloud.datastream_v1alpha1.types.SchemaFileFormat):
The schema file format along JSON data files.
@@ -605,6 +627,7 @@ class JsonCompression(proto.Enum):
class GcsDestinationConfig(proto.Message):
r"""Google Cloud Storage destination configuration
+
Attributes:
path (str):
Path inside the Cloud Storage bucket to write
@@ -641,6 +664,7 @@ class GcsDestinationConfig(proto.Message):
class DestinationConfig(proto.Message):
r"""The configuration of the stream destination.
+
Attributes:
destination_connection_profile_name (str):
Required. Destination connection profile
@@ -660,6 +684,7 @@ class DestinationConfig(proto.Message):
class Stream(proto.Message):
r"""
+
Attributes:
name (str):
Output only. The stream's name.
@@ -725,7 +750,8 @@ class BackfillAllStrategy(proto.Message):
class BackfillNoneStrategy(proto.Message):
r"""Backfill strategy to disable automatic backfill for the
Stream's objects.
- """
+
+ """
name = proto.Field(proto.STRING, number=1,)
create_time = proto.Field(proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp,)
@@ -754,6 +780,7 @@ class BackfillNoneStrategy(proto.Message):
class Error(proto.Message):
r"""Represent a user-facing Error.
+
Attributes:
reason (str):
A title that explains the reason for the
@@ -780,6 +807,7 @@ class Error(proto.Message):
class ValidationResult(proto.Message):
r"""Contains the current validation results.
+
Attributes:
validations (Sequence[google.cloud.datastream_v1alpha1.types.Validation]):
A list of validations (includes both executed
@@ -791,6 +819,7 @@ class ValidationResult(proto.Message):
class Validation(proto.Message):
r"""
+
Attributes:
description (str):
A short description of the validation.
@@ -817,6 +846,7 @@ class Status(proto.Enum):
class ValidationMessage(proto.Message):
r"""Represent user-facing validation result message.
+
Attributes:
message (str):
The result of the validation.
diff --git a/noxfile.py b/noxfile.py
index 935a924..2bb4cf7 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -29,7 +29,7 @@
DEFAULT_PYTHON_VERSION = "3.8"
SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"]
-UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"]
+UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"]
CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()
diff --git a/owlbot.py b/owlbot.py
index de41109..af3d932 100644
--- a/owlbot.py
+++ b/owlbot.py
@@ -31,50 +31,10 @@
# ----------------------------------------------------------------------------
templated_files = gcp.CommonTemplates().py_library(microgenerator=True)
-python.py_samples(skip_readmes=True)
-s.move(templated_files, excludes=[".coveragerc"]) # the microgenerator has a good coveragerc file
-
-# Remove the replacements below once https://github.com/googleapis/synthtool/pull/1188 is merged
-
-# Update googleapis/repo-automation-bots repo to main in .kokoro/*.sh files
-s.replace(".kokoro/*.sh", "repo-automation-bots/tree/master", "repo-automation-bots/tree/main")
-
-# Customize CONTRIBUTING.rst to replace master with main
-s.replace(
- "CONTRIBUTING.rst",
- "fetch and merge changes from upstream into master",
- "fetch and merge changes from upstream into main",
-)
-s.replace(
- "CONTRIBUTING.rst",
- "git merge upstream/master",
- "git merge upstream/main",
-)
-
-s.replace(
- "CONTRIBUTING.rst",
- """export GOOGLE_CLOUD_TESTING_BRANCH=\"master\"""",
- """export GOOGLE_CLOUD_TESTING_BRANCH=\"main\"""",
-)
-
-s.replace(
- "CONTRIBUTING.rst",
- "remote \(``master``\)",
- "remote (``main``)",
-)
-
-s.replace(
- "CONTRIBUTING.rst",
- "blob/master/CONTRIBUTING.rst",
- "blob/main/CONTRIBUTING.rst",
-)
+s.move(templated_files, excludes=[".coveragerc"]) # the microgenerator has a good coveragerc file
-s.replace(
- "CONTRIBUTING.rst",
- "blob/master/noxfile.py",
- "blob/main/noxfile.py",
-)
+python.py_samples(skip_readmes=True)
# ----------------------------------------------------------------------------
# Run blacken session
diff --git a/renovate.json b/renovate.json
index c048955..c21036d 100644
--- a/renovate.json
+++ b/renovate.json
@@ -1,6 +1,9 @@
{
"extends": [
- "config:base", ":preserveSemverRanges"
+ "config:base",
+ "group:all",
+ ":preserveSemverRanges",
+ ":disableDependencyDashboard"
],
"ignorePaths": [".pre-commit-config.yaml"],
"pip_requirements": {
diff --git a/scripts/fixup_datastream_v1alpha1_keywords.py b/scripts/fixup_datastream_v1alpha1_keywords.py
index 44097c6..636ec17 100644
--- a/scripts/fixup_datastream_v1alpha1_keywords.py
+++ b/scripts/fixup_datastream_v1alpha1_keywords.py
@@ -39,27 +39,27 @@ def partition(
class datastreamCallTransformer(cst.CSTTransformer):
CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata')
METHOD_TO_PARAMS: Dict[str, Tuple[str]] = {
- 'create_connection_profile': ('parent', 'connection_profile_id', 'connection_profile', 'request_id', ),
- 'create_private_connection': ('parent', 'private_connection_id', 'private_connection', 'request_id', ),
- 'create_route': ('parent', 'route_id', 'route', 'request_id', ),
- 'create_stream': ('parent', 'stream_id', 'stream', 'request_id', 'validate_only', 'force', ),
- 'delete_connection_profile': ('name', 'request_id', ),
- 'delete_private_connection': ('name', 'request_id', 'force', ),
- 'delete_route': ('name', 'request_id', ),
- 'delete_stream': ('name', 'request_id', ),
- 'discover_connection_profile': ('parent', 'connection_profile', 'connection_profile_name', 'recursive', 'recursion_depth', 'oracle_rdbms', 'mysql_rdbms', ),
- 'fetch_errors': ('stream', ),
- 'fetch_static_ips': ('name', 'page_size', 'page_token', ),
- 'get_connection_profile': ('name', ),
- 'get_private_connection': ('name', ),
- 'get_route': ('name', ),
- 'get_stream': ('name', ),
- 'list_connection_profiles': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ),
- 'list_private_connections': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ),
- 'list_routes': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ),
- 'list_streams': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ),
- 'update_connection_profile': ('connection_profile', 'update_mask', 'request_id', ),
- 'update_stream': ('stream', 'update_mask', 'request_id', 'validate_only', 'force', ),
+ 'create_connection_profile': ('parent', 'connection_profile_id', 'connection_profile', 'request_id', ),
+ 'create_private_connection': ('parent', 'private_connection_id', 'private_connection', 'request_id', ),
+ 'create_route': ('parent', 'route_id', 'route', 'request_id', ),
+ 'create_stream': ('parent', 'stream_id', 'stream', 'request_id', 'validate_only', 'force', ),
+ 'delete_connection_profile': ('name', 'request_id', ),
+ 'delete_private_connection': ('name', 'request_id', 'force', ),
+ 'delete_route': ('name', 'request_id', ),
+ 'delete_stream': ('name', 'request_id', ),
+ 'discover_connection_profile': ('parent', 'connection_profile', 'connection_profile_name', 'recursive', 'recursion_depth', 'oracle_rdbms', 'mysql_rdbms', ),
+ 'fetch_errors': ('stream', ),
+ 'fetch_static_ips': ('name', 'page_size', 'page_token', ),
+ 'get_connection_profile': ('name', ),
+ 'get_private_connection': ('name', ),
+ 'get_route': ('name', ),
+ 'get_stream': ('name', ),
+ 'list_connection_profiles': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ),
+ 'list_private_connections': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ),
+ 'list_routes': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ),
+ 'list_streams': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ),
+ 'update_connection_profile': ('connection_profile', 'update_mask', 'request_id', ),
+ 'update_stream': ('stream', 'update_mask', 'request_id', 'validate_only', 'force', ),
}
def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode:
@@ -78,7 +78,7 @@ def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode:
return updated
kwargs, ctrl_kwargs = partition(
- lambda a: not a.keyword.value in self.CTRL_PARAMS,
+ lambda a: a.keyword.value not in self.CTRL_PARAMS,
kwargs
)
diff --git a/setup.py b/setup.py
index 2ed2f30..2cf4b48 100644
--- a/setup.py
+++ b/setup.py
@@ -22,7 +22,7 @@
name = "google-cloud-datastream"
description = "Datastream client library"
-version = "0.1.3"
+version = "0.2.0"
release_status = "Development Status :: 3 - Alpha"
url = "https://github.com/googleapis/python-datastream"
dependencies = [
diff --git a/tests/unit/gapic/datastream_v1alpha1/test_datastream.py b/tests/unit/gapic/datastream_v1alpha1/test_datastream.py
index ccbd965..116b55e 100644
--- a/tests/unit/gapic/datastream_v1alpha1/test_datastream.py
+++ b/tests/unit/gapic/datastream_v1alpha1/test_datastream.py
@@ -32,6 +32,7 @@
from google.api_core import grpc_helpers_async
from google.api_core import operation_async # type: ignore
from google.api_core import operations_v1
+from google.api_core import path_template
from google.auth import credentials as ga_credentials
from google.auth.exceptions import MutualTLSChannelError
from google.cloud.datastream_v1alpha1.services.datastream import DatastreamAsyncClient
@@ -5686,6 +5687,9 @@ def test_datastream_base_transport():
with pytest.raises(NotImplementedError):
getattr(transport, method)(request=object())
+ with pytest.raises(NotImplementedError):
+ transport.close()
+
# Additionally, the LRO client (a property) should
# also raise NotImplementedError
with pytest.raises(NotImplementedError):
@@ -6263,3 +6267,49 @@ def test_client_withDEFAULT_CLIENT_INFO():
credentials=ga_credentials.AnonymousCredentials(), client_info=client_info,
)
prep.assert_called_once_with(client_info)
+
+
+@pytest.mark.asyncio
+async def test_transport_close_async():
+ client = DatastreamAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio",
+ )
+ with mock.patch.object(
+ type(getattr(client.transport, "grpc_channel")), "close"
+ ) as close:
+ async with client:
+ close.assert_not_called()
+ close.assert_called_once()
+
+
+def test_transport_close():
+ transports = {
+ "grpc": "_grpc_channel",
+ }
+
+ for transport, close_name in transports.items():
+ client = DatastreamClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport
+ )
+ with mock.patch.object(
+ type(getattr(client.transport, close_name)), "close"
+ ) as close:
+ with client:
+ close.assert_not_called()
+ close.assert_called_once()
+
+
+def test_client_ctx():
+ transports = [
+ "grpc",
+ ]
+ for transport in transports:
+ client = DatastreamClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport
+ )
+ # Test client calls underlying transport.
+ with mock.patch.object(type(client.transport), "close") as close:
+ close.assert_not_called()
+ with client:
+ pass
+ close.assert_called()