diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml
index 6d064ddb..6301519a 100644
--- a/.github/.OwlBot.lock.yaml
+++ b/.github/.OwlBot.lock.yaml
@@ -13,5 +13,5 @@
# limitations under the License.
docker:
image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest
- digest: sha256:94bb690db96e6242b2567a4860a94d48fa48696d092e51b0884a1a2c0a79a407
-# created: 2024-07-31T14:52:44.926548819Z
+ digest: sha256:2ed982f884312e4883e01b5ab8af8b6935f0216a5a2d82928d273081fc3be562
+# created: 2024-11-12T12:09:45.821174897Z
diff --git a/.github/release-trigger.yml b/.github/release-trigger.yml
index d4ca9418..932fbdd0 100644
--- a/.github/release-trigger.yml
+++ b/.github/release-trigger.yml
@@ -1 +1,2 @@
enabled: true
+multiScmName: python-datastore
diff --git a/.github/sync-repo-settings.yaml b/.github/sync-repo-settings.yaml
index 4b8f66f7..319f6e4b 100644
--- a/.github/sync-repo-settings.yaml
+++ b/.github/sync-repo-settings.yaml
@@ -29,6 +29,7 @@ branchProtectionRules:
- 'unit (3.10)'
- 'unit (3.11)'
- 'unit (3.12)'
+ - 'unit (3.13)'
- 'cover'
- 'mypy'
# List of explicit permissions to add (additive only)
diff --git a/.github/workflows/unittest.yml b/.github/workflows/unittest.yml
index f4a337c4..6a0bc074 100644
--- a/.github/workflows/unittest.yml
+++ b/.github/workflows/unittest.yml
@@ -8,7 +8,7 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
- python: ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12']
+ python: ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12', '3.13']
steps:
- name: Checkout
uses: actions/checkout@v4
@@ -30,6 +30,7 @@ jobs:
with:
name: coverage-artifact-${{ matrix.python }}
path: .coverage-${{ matrix.python }}
+ include-hidden-files: true
cover:
runs-on: ubuntu-latest
diff --git a/.kokoro/docker/docs/requirements.txt b/.kokoro/docker/docs/requirements.txt
index 7129c771..8bb07645 100644
--- a/.kokoro/docker/docs/requirements.txt
+++ b/.kokoro/docker/docs/requirements.txt
@@ -1,42 +1,42 @@
#
-# This file is autogenerated by pip-compile with Python 3.9
+# This file is autogenerated by pip-compile with Python 3.10
# by the following command:
#
# pip-compile --allow-unsafe --generate-hashes requirements.in
#
-argcomplete==3.4.0 \
- --hash=sha256:69a79e083a716173e5532e0fa3bef45f793f4e61096cf52b5a42c0211c8b8aa5 \
- --hash=sha256:c2abcdfe1be8ace47ba777d4fce319eb13bf8ad9dace8d085dcad6eded88057f
+argcomplete==3.5.1 \
+ --hash=sha256:1a1d148bdaa3e3b93454900163403df41448a248af01b6e849edc5ac08e6c363 \
+ --hash=sha256:eb1ee355aa2557bd3d0145de7b06b2a45b0ce461e1e7813f5d066039ab4177b4
# via nox
-colorlog==6.8.2 \
- --hash=sha256:3e3e079a41feb5a1b64f978b5ea4f46040a94f11f0e8bbb8261e3dbbeca64d44 \
- --hash=sha256:4dcbb62368e2800cb3c5abd348da7e53f6c362dda502ec27c560b2e58a66bd33
+colorlog==6.9.0 \
+ --hash=sha256:5906e71acd67cb07a71e779c47c4bcb45fb8c2993eebe9e5adcd6a6f1b283eff \
+ --hash=sha256:bfba54a1b93b94f54e1f4fe48395725a3d92fd2a4af702f6bd70946bdc0c6ac2
# via nox
-distlib==0.3.8 \
- --hash=sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784 \
- --hash=sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64
+distlib==0.3.9 \
+ --hash=sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87 \
+ --hash=sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403
# via virtualenv
-filelock==3.15.4 \
- --hash=sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb \
- --hash=sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7
+filelock==3.16.1 \
+ --hash=sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0 \
+ --hash=sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435
# via virtualenv
-nox==2024.4.15 \
- --hash=sha256:6492236efa15a460ecb98e7b67562a28b70da006ab0be164e8821177577c0565 \
- --hash=sha256:ecf6700199cdfa9e5ea0a41ff5e6ef4641d09508eda6edb89d9987864115817f
+nox==2024.10.9 \
+ --hash=sha256:1d36f309a0a2a853e9bccb76bbef6bb118ba92fa92674d15604ca99adeb29eab \
+ --hash=sha256:7aa9dc8d1c27e9f45ab046ffd1c3b2c4f7c91755304769df231308849ebded95
# via -r requirements.in
-packaging==24.1 \
- --hash=sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002 \
- --hash=sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124
+packaging==24.2 \
+ --hash=sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759 \
+ --hash=sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f
# via nox
-platformdirs==4.2.2 \
- --hash=sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee \
- --hash=sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3
+platformdirs==4.3.6 \
+ --hash=sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907 \
+ --hash=sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb
# via virtualenv
-tomli==2.0.1 \
- --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \
- --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f
+tomli==2.0.2 \
+ --hash=sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38 \
+ --hash=sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed
# via nox
-virtualenv==20.26.3 \
- --hash=sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a \
- --hash=sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589
+virtualenv==20.27.1 \
+ --hash=sha256:142c6be10212543b32c6c45d3d3893dff89112cc588b7d0879ae5a1ec03a47ba \
+ --hash=sha256:f11f1b8a29525562925f745563bfd48b189450f61fb34c4f9cc79dd5aa32a1f4
# via nox
diff --git a/.kokoro/docs/common.cfg b/.kokoro/docs/common.cfg
index 6b7da47b..33f2bdc7 100644
--- a/.kokoro/docs/common.cfg
+++ b/.kokoro/docs/common.cfg
@@ -63,4 +63,4 @@ before_action {
keyname: "docuploader_service_account"
}
}
-}
\ No newline at end of file
+}
diff --git a/.kokoro/release.sh b/.kokoro/release.sh
index b460d5a0..d3805e02 100755
--- a/.kokoro/release.sh
+++ b/.kokoro/release.sh
@@ -23,7 +23,7 @@ python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source /
export PYTHONUNBUFFERED=1
# Move into the package, build the distribution and upload.
-TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google-cloud-pypi-token-keystore-1")
+TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google-cloud-pypi-token-keystore-2")
cd github/python-datastore
python3 setup.py sdist bdist_wheel
twine upload --username __token__ --password "${TWINE_PASSWORD}" dist/*
diff --git a/.kokoro/release/common.cfg b/.kokoro/release/common.cfg
index 717dc3ee..09d805a9 100644
--- a/.kokoro/release/common.cfg
+++ b/.kokoro/release/common.cfg
@@ -28,7 +28,7 @@ before_action {
fetch_keystore {
keystore_resource {
keystore_config_id: 73713
- keyname: "google-cloud-pypi-token-keystore-1"
+ keyname: "google-cloud-pypi-token-keystore-2"
}
}
}
diff --git a/.kokoro/samples/python3.13/common.cfg b/.kokoro/samples/python3.13/common.cfg
new file mode 100644
index 00000000..33af919b
--- /dev/null
+++ b/.kokoro/samples/python3.13/common.cfg
@@ -0,0 +1,40 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Specify which tests to run
+env_vars: {
+ key: "RUN_TESTS_SESSION"
+ value: "py-3.13"
+}
+
+# Declare build specific Cloud project.
+env_vars: {
+ key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+ value: "python-docs-samples-tests-313"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-datastore/.kokoro/test-samples.sh"
+}
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
+}
+
+# Download secrets for samples
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-datastore/.kokoro/trampoline_v2.sh"
diff --git a/.kokoro/samples/python3.13/continuous.cfg b/.kokoro/samples/python3.13/continuous.cfg
new file mode 100644
index 00000000..a1c8d975
--- /dev/null
+++ b/.kokoro/samples/python3.13/continuous.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.13/periodic-head.cfg b/.kokoro/samples/python3.13/periodic-head.cfg
new file mode 100644
index 00000000..714045a7
--- /dev/null
+++ b/.kokoro/samples/python3.13/periodic-head.cfg
@@ -0,0 +1,11 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-datastore/.kokoro/test-samples-against-head.sh"
+}
diff --git a/.kokoro/samples/python3.13/periodic.cfg b/.kokoro/samples/python3.13/periodic.cfg
new file mode 100644
index 00000000..71cd1e59
--- /dev/null
+++ b/.kokoro/samples/python3.13/periodic.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "False"
+}
diff --git a/.kokoro/samples/python3.13/presubmit.cfg b/.kokoro/samples/python3.13/presubmit.cfg
new file mode 100644
index 00000000..a1c8d975
--- /dev/null
+++ b/.kokoro/samples/python3.13/presubmit.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/test-samples-impl.sh b/.kokoro/test-samples-impl.sh
index 55910c8b..53e365bc 100755
--- a/.kokoro/test-samples-impl.sh
+++ b/.kokoro/test-samples-impl.sh
@@ -33,7 +33,8 @@ export PYTHONUNBUFFERED=1
env | grep KOKORO
# Install nox
-python3.9 -m pip install --upgrade --quiet nox
+# `virtualenv==20.26.6` is added for Python 3.7 compatibility
+python3.9 -m pip install --upgrade --quiet nox virtualenv==20.26.6
# Use secrets acessor service account to get secrets
if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then
diff --git a/.release-please-manifest.json b/.release-please-manifest.json
index a95c589d..eeb4bcda 100644
--- a/.release-please-manifest.json
+++ b/.release-please-manifest.json
@@ -1,3 +1,3 @@
{
- ".": "2.20.1"
+ ".": "2.20.2"
}
\ No newline at end of file
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 45db587d..549b44f4 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,13 @@
[1]: https://pypi.org/project/google-cloud-datastore/#history
+## [2.20.2](https://github.com/googleapis/python-datastore/compare/v2.20.1...v2.20.2) (2024-12-12)
+
+
+### Bug Fixes
+
+* Preserve list meanings ([#575](https://github.com/googleapis/python-datastore/issues/575)) ([266243b](https://github.com/googleapis/python-datastore/commit/266243ba360a9d41ab4b51c323eac44d2cfc35cb))
+
## [2.20.1](https://github.com/googleapis/python-datastore/compare/v2.20.0...v2.20.1) (2024-08-14)
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index 854a6c7c..c59f8503 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -22,7 +22,7 @@ In order to add a feature:
documentation.
- The feature must work fully on the following CPython versions:
- 3.7, 3.8, 3.9, 3.10, 3.11 and 3.12 on both UNIX and Windows.
+ 3.7, 3.8, 3.9, 3.10, 3.11, 3.12 and 3.13 on both UNIX and Windows.
- The feature must not add unnecessary dependencies (where
"unnecessary" is of course subjective, but new dependencies should
@@ -72,7 +72,7 @@ We use `nox `__ to instrument our tests.
- To run a single unit test::
- $ nox -s unit-3.12 -- -k
+ $ nox -s unit-3.13 -- -k
.. note::
@@ -252,6 +252,7 @@ We support:
- `Python 3.10`_
- `Python 3.11`_
- `Python 3.12`_
+- `Python 3.13`_
.. _Python 3.7: https://docs.python.org/3.7/
.. _Python 3.8: https://docs.python.org/3.8/
@@ -259,6 +260,7 @@ We support:
.. _Python 3.10: https://docs.python.org/3.10/
.. _Python 3.11: https://docs.python.org/3.11/
.. _Python 3.12: https://docs.python.org/3.12/
+.. _Python 3.13: https://docs.python.org/3.13/
Supported versions can be found in our ``noxfile.py`` `config`_.
diff --git a/google/cloud/datastore/gapic_version.py b/google/cloud/datastore/gapic_version.py
index 5feb6ccf..6975b43d 100644
--- a/google/cloud/datastore/gapic_version.py
+++ b/google/cloud/datastore/gapic_version.py
@@ -12,4 +12,4 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-__version__ = "2.20.1" # {x-release-please-version}
+__version__ = "2.20.2" # {x-release-please-version}
diff --git a/google/cloud/datastore/helpers.py b/google/cloud/datastore/helpers.py
index 6eaa3b89..d491360c 100644
--- a/google/cloud/datastore/helpers.py
+++ b/google/cloud/datastore/helpers.py
@@ -18,7 +18,6 @@
"""
import datetime
-import itertools
from google.protobuf import struct_pb2
from google.type import latlng_pb2
@@ -43,36 +42,29 @@ def _get_meaning(value_pb, is_list=False):
:param is_list: Boolean indicating if the ``value_pb`` contains
a list value.
- :rtype: int
+ :rtype: int | Tuple[Optional[int], Optional[list[int | None]]] | None
:returns: The meaning for the ``value_pb`` if one is set, else
- :data:`None`. For a list value, if there are disagreeing
- means it just returns a list of meanings. If all the
- list meanings agree, it just condenses them.
+ :data:`None`. For a list value, returns a tuple of
+ the root meaning of the list, and a list of meanings
+ of each sub-value. If subvalues are all empty, returns
+ :data:`None` instead of a list.
"""
if is_list:
+ root_meaning = value_pb.meaning or None
values = value_pb.array_value.values
- # An empty list will have no values, hence no shared meaning
- # set among them.
- if len(values) == 0:
- return None
-
# We check among all the meanings, some of which may be None,
# the rest which may be enum/int values.
- all_meanings = [_get_meaning(sub_value_pb) for sub_value_pb in values]
- unique_meanings = set(all_meanings)
-
- if len(unique_meanings) == 1:
- # If there is a unique meaning, we preserve it.
- return unique_meanings.pop()
- else: # We know len(value_pb.array_value.values) > 0.
- # If the meaning is not unique, just return all of them.
- return all_meanings
-
- elif value_pb.meaning: # Simple field (int32).
- return value_pb.meaning
-
- return None
+ sub_meanings = [sub_value_pb.meaning or None for sub_value_pb in values]
+ if not any(meaning is not None for meaning in sub_meanings):
+ sub_meanings = None
+ if root_meaning is None and sub_meanings is None:
+ # no meanings to save
+ return None
+ else:
+ return root_meaning, sub_meanings
+ else:
+ return value_pb.meaning or None
def _new_value_pb(entity_pb, name):
@@ -156,6 +148,10 @@ def entity_from_protobuf(pb):
def _set_pb_meaning_from_entity(entity, name, value, value_pb, is_list=False):
"""Add meaning information (from an entity) to a protobuf.
+ value_pb is assumed to have no `meaning` data currently present.
+ This means if the entity's meaning data is None, this function will do nothing,
+ rather than removing any existing data.
+
:type entity: :class:`google.cloud.datastore.entity.Entity`
:param entity: The entity to be turned into a protobuf.
@@ -181,14 +177,20 @@ def _set_pb_meaning_from_entity(entity, name, value, value_pb, is_list=False):
if orig_value is not value:
return
- # For lists, we set meaning on each sub-element.
- if is_list:
- if not isinstance(meaning, list):
- meaning = itertools.repeat(meaning)
- val_iter = zip(value_pb.array_value.values, meaning)
- for sub_value_pb, sub_meaning in val_iter:
- if sub_meaning is not None:
- sub_value_pb.meaning = sub_meaning
+ if meaning is None:
+ # no meaning data to set
+ return
+ elif is_list:
+ # for lists, set meaning on the root pb and on each sub-element
+ root_meaning, sub_meaning_list = meaning
+ if root_meaning is not None:
+ value_pb.meaning = root_meaning
+ if sub_meaning_list:
+ for sub_value_pb, sub_meaning in zip(
+ value_pb.array_value.values, sub_meaning_list
+ ):
+ if sub_meaning is not None:
+ sub_value_pb.meaning = sub_meaning
else:
value_pb.meaning = meaning
diff --git a/google/cloud/datastore/version.py b/google/cloud/datastore/version.py
index 02ee97e2..e37230a6 100644
--- a/google/cloud/datastore/version.py
+++ b/google/cloud/datastore/version.py
@@ -12,4 +12,4 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-__version__ = "2.20.1"
+__version__ = "2.20.2"
diff --git a/google/cloud/datastore_admin/gapic_version.py b/google/cloud/datastore_admin/gapic_version.py
index 5585b0b1..4c1787c5 100644
--- a/google/cloud/datastore_admin/gapic_version.py
+++ b/google/cloud/datastore_admin/gapic_version.py
@@ -13,4 +13,4 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-__version__ = "2.20.1" # {x-release-please-version}
+__version__ = "2.20.2" # {x-release-please-version}
diff --git a/google/cloud/datastore_admin_v1/gapic_version.py b/google/cloud/datastore_admin_v1/gapic_version.py
index 9b4d43d6..efd18799 100644
--- a/google/cloud/datastore_admin_v1/gapic_version.py
+++ b/google/cloud/datastore_admin_v1/gapic_version.py
@@ -12,4 +12,4 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-__version__ = "2.20.1" # {x-release-please-version}
+__version__ = "2.20.2" # {x-release-please-version}
diff --git a/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py b/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py
index bc248882..0c2572d6 100644
--- a/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py
+++ b/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py
@@ -1129,11 +1129,7 @@ async def list_operations(
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method_async.wrap_method(
- self._client._transport.list_operations,
- default_timeout=None,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self.transport._wrapped_methods[self._client._transport.list_operations]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -1186,11 +1182,7 @@ async def get_operation(
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method_async.wrap_method(
- self._client._transport.get_operation,
- default_timeout=None,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self.transport._wrapped_methods[self._client._transport.get_operation]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -1247,11 +1239,7 @@ async def delete_operation(
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method_async.wrap_method(
- self._client._transport.delete_operation,
- default_timeout=None,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self.transport._wrapped_methods[self._client._transport.delete_operation]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -1304,11 +1292,7 @@ async def cancel_operation(
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method_async.wrap_method(
- self._client._transport.cancel_operation,
- default_timeout=None,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation]
# Certain fields should be provided within the metadata header;
# add these here.
diff --git a/google/cloud/datastore_admin_v1/services/datastore_admin/client.py b/google/cloud/datastore_admin_v1/services/datastore_admin/client.py
index 388ed08a..ca54ec3c 100644
--- a/google/cloud/datastore_admin_v1/services/datastore_admin/client.py
+++ b/google/cloud/datastore_admin_v1/services/datastore_admin/client.py
@@ -1528,11 +1528,7 @@ def list_operations(
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method.wrap_method(
- self._transport.list_operations,
- default_timeout=None,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self._transport._wrapped_methods[self._transport.list_operations]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -1585,11 +1581,7 @@ def get_operation(
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method.wrap_method(
- self._transport.get_operation,
- default_timeout=None,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self._transport._wrapped_methods[self._transport.get_operation]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -1646,11 +1638,7 @@ def delete_operation(
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method.wrap_method(
- self._transport.delete_operation,
- default_timeout=None,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self._transport._wrapped_methods[self._transport.delete_operation]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -1703,11 +1691,7 @@ def cancel_operation(
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method.wrap_method(
- self._transport.cancel_operation,
- default_timeout=None,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self._transport._wrapped_methods[self._transport.cancel_operation]
# Certain fields should be provided within the metadata header;
# add these here.
diff --git a/google/cloud/datastore_admin_v1/services/datastore_admin/transports/README.rst b/google/cloud/datastore_admin_v1/services/datastore_admin/transports/README.rst
new file mode 100644
index 00000000..fe3ac4f1
--- /dev/null
+++ b/google/cloud/datastore_admin_v1/services/datastore_admin/transports/README.rst
@@ -0,0 +1,9 @@
+
+transport inheritance structure
+_______________________________
+
+`DatastoreAdminTransport` is the ABC for all transports.
+- public child `DatastoreAdminGrpcTransport` for sync gRPC transport (defined in `grpc.py`).
+- public child `DatastoreAdminGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`).
+- private child `_BaseDatastoreAdminRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`).
+- public child `DatastoreAdminRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`).
diff --git a/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py b/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py
index 8c3a00f3..bcfb2688 100644
--- a/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py
+++ b/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py
@@ -185,6 +185,26 @@ def _prep_wrapped_messages(self, client_info):
default_timeout=60.0,
client_info=client_info,
),
+ self.cancel_operation: gapic_v1.method.wrap_method(
+ self.cancel_operation,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.delete_operation: gapic_v1.method.wrap_method(
+ self.delete_operation,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.get_operation: gapic_v1.method.wrap_method(
+ self.get_operation,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.list_operations: gapic_v1.method.wrap_method(
+ self.list_operations,
+ default_timeout=None,
+ client_info=client_info,
+ ),
}
def close(self):
diff --git a/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py b/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py
index 7526fc5c..366878db 100644
--- a/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py
+++ b/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py
@@ -13,6 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
+import inspect
import warnings
from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
@@ -284,6 +285,9 @@ def __init__(
)
# Wrap messages. This must be done after self._grpc_channel exists
+ self._wrap_with_kind = (
+ "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters
+ )
self._prep_wrapped_messages(client_info)
@property
@@ -527,27 +531,27 @@ def list_indexes(
def _prep_wrapped_messages(self, client_info):
"""Precompute the wrapped methods, overriding the base class method to use async wrappers."""
self._wrapped_methods = {
- self.export_entities: gapic_v1.method_async.wrap_method(
+ self.export_entities: self._wrap_method(
self.export_entities,
default_timeout=60.0,
client_info=client_info,
),
- self.import_entities: gapic_v1.method_async.wrap_method(
+ self.import_entities: self._wrap_method(
self.import_entities,
default_timeout=60.0,
client_info=client_info,
),
- self.create_index: gapic_v1.method_async.wrap_method(
+ self.create_index: self._wrap_method(
self.create_index,
default_timeout=60.0,
client_info=client_info,
),
- self.delete_index: gapic_v1.method_async.wrap_method(
+ self.delete_index: self._wrap_method(
self.delete_index,
default_timeout=60.0,
client_info=client_info,
),
- self.get_index: gapic_v1.method_async.wrap_method(
+ self.get_index: self._wrap_method(
self.get_index,
default_retry=retries.AsyncRetry(
initial=0.1,
@@ -562,7 +566,7 @@ def _prep_wrapped_messages(self, client_info):
default_timeout=60.0,
client_info=client_info,
),
- self.list_indexes: gapic_v1.method_async.wrap_method(
+ self.list_indexes: self._wrap_method(
self.list_indexes,
default_retry=retries.AsyncRetry(
initial=0.1,
@@ -577,11 +581,40 @@ def _prep_wrapped_messages(self, client_info):
default_timeout=60.0,
client_info=client_info,
),
+ self.cancel_operation: self._wrap_method(
+ self.cancel_operation,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.delete_operation: self._wrap_method(
+ self.delete_operation,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.get_operation: self._wrap_method(
+ self.get_operation,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.list_operations: self._wrap_method(
+ self.list_operations,
+ default_timeout=None,
+ client_info=client_info,
+ ),
}
+ def _wrap_method(self, func, *args, **kwargs):
+ if self._wrap_with_kind: # pragma: NO COVER
+ kwargs["kind"] = self.kind
+ return gapic_v1.method_async.wrap_method(func, *args, **kwargs)
+
def close(self):
return self.grpc_channel.close()
+ @property
+ def kind(self) -> str:
+ return "grpc_asyncio"
+
@property
def delete_operation(
self,
diff --git a/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest.py b/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest.py
index 8776e623..01fcdd85 100644
--- a/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest.py
+++ b/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest.py
@@ -16,38 +16,34 @@
from google.auth.transport.requests import AuthorizedSession # type: ignore
import json # type: ignore
-import grpc # type: ignore
-from google.auth.transport.grpc import SslCredentials # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.api_core import exceptions as core_exceptions
from google.api_core import retry as retries
from google.api_core import rest_helpers
from google.api_core import rest_streaming
-from google.api_core import path_template
from google.api_core import gapic_v1
from google.protobuf import json_format
from google.api_core import operations_v1
+
from requests import __version__ as requests_version
import dataclasses
-import re
from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union
import warnings
-try:
- OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
-except AttributeError: # pragma: NO COVER
- OptionalRetry = Union[retries.Retry, object, None] # type: ignore
-
from google.cloud.datastore_admin_v1.types import datastore_admin
from google.cloud.datastore_admin_v1.types import index
from google.longrunning import operations_pb2 # type: ignore
-from .base import (
- DatastoreAdminTransport,
- DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO,
-)
+
+from .rest_base import _BaseDatastoreAdminRestTransport
+from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO
+
+try:
+ OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
+except AttributeError: # pragma: NO COVER
+ OptionalRetry = Union[retries.Retry, object, None] # type: ignore
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
@@ -358,8 +354,8 @@ class DatastoreAdminRestStub:
_interceptor: DatastoreAdminRestInterceptor
-class DatastoreAdminRestTransport(DatastoreAdminTransport):
- """REST backend transport for DatastoreAdmin.
+class DatastoreAdminRestTransport(_BaseDatastoreAdminRestTransport):
+ """REST backend synchronous transport for DatastoreAdmin.
Google Cloud Datastore Admin API
@@ -421,7 +417,6 @@ class DatastoreAdminRestTransport(DatastoreAdminTransport):
and call it.
It sends JSON representations of protocol buffers over HTTP/1.1
-
"""
def __init__(
@@ -475,21 +470,12 @@ def __init__(
# TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
# TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
# credentials object
- maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host)
- if maybe_url_match is None:
- raise ValueError(
- f"Unexpected hostname structure: {host}"
- ) # pragma: NO COVER
-
- url_match_items = maybe_url_match.groupdict()
-
- host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
-
super().__init__(
host=host,
credentials=credentials,
client_info=client_info,
always_use_jwt_access=always_use_jwt_access,
+ url_scheme=url_scheme,
api_audience=api_audience,
)
self._session = AuthorizedSession(
@@ -553,9 +539,34 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient:
# Return the client from cache.
return self._operations_client
- class _CreateIndex(DatastoreAdminRestStub):
+ class _CreateIndex(
+ _BaseDatastoreAdminRestTransport._BaseCreateIndex, DatastoreAdminRestStub
+ ):
def __hash__(self):
- return hash("CreateIndex")
+ return hash("DatastoreAdminRestTransport.CreateIndex")
+
+ @staticmethod
+ def _get_response(
+ host,
+ metadata,
+ query_params,
+ session,
+ timeout,
+ transcoded_request,
+ body=None,
+ ):
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+ response = getattr(session, method)(
+ "{host}{uri}".format(host=host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params, strict=True),
+ data=body,
+ )
+ return response
def __call__(
self,
@@ -585,44 +596,32 @@ def __call__(
"""
- http_options: List[Dict[str, str]] = [
- {
- "method": "post",
- "uri": "/v1/projects/{project_id}/indexes",
- "body": "index",
- },
- ]
+ http_options = (
+ _BaseDatastoreAdminRestTransport._BaseCreateIndex._get_http_options()
+ )
request, metadata = self._interceptor.pre_create_index(request, metadata)
- pb_request = datastore_admin.CreateIndexRequest.pb(request)
- transcoded_request = path_template.transcode(http_options, pb_request)
-
- # Jsonify the request body
+ transcoded_request = _BaseDatastoreAdminRestTransport._BaseCreateIndex._get_transcoded_request(
+ http_options, request
+ )
- body = json_format.MessageToJson(
- transcoded_request["body"], use_integers_for_enums=True
+ body = _BaseDatastoreAdminRestTransport._BaseCreateIndex._get_request_body_json(
+ transcoded_request
)
- uri = transcoded_request["uri"]
- method = transcoded_request["method"]
# Jsonify the query params
- query_params = json.loads(
- json_format.MessageToJson(
- transcoded_request["query_params"],
- use_integers_for_enums=True,
- )
+ query_params = _BaseDatastoreAdminRestTransport._BaseCreateIndex._get_query_params_json(
+ transcoded_request
)
- query_params["$alt"] = "json;enum-encoding=int"
-
# Send the request
- headers = dict(metadata)
- headers["Content-Type"] = "application/json"
- response = getattr(self._session, method)(
- "{host}{uri}".format(host=self._host, uri=uri),
- timeout=timeout,
- headers=headers,
- params=rest_helpers.flatten_query_params(query_params, strict=True),
- data=body,
+ response = DatastoreAdminRestTransport._CreateIndex._get_response(
+ self._host,
+ metadata,
+ query_params,
+ self._session,
+ timeout,
+ transcoded_request,
+ body,
)
# In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
@@ -636,9 +635,33 @@ def __call__(
resp = self._interceptor.post_create_index(resp)
return resp
- class _DeleteIndex(DatastoreAdminRestStub):
+ class _DeleteIndex(
+ _BaseDatastoreAdminRestTransport._BaseDeleteIndex, DatastoreAdminRestStub
+ ):
def __hash__(self):
- return hash("DeleteIndex")
+ return hash("DatastoreAdminRestTransport.DeleteIndex")
+
+ @staticmethod
+ def _get_response(
+ host,
+ metadata,
+ query_params,
+ session,
+ timeout,
+ transcoded_request,
+ body=None,
+ ):
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+ response = getattr(session, method)(
+ "{host}{uri}".format(host=host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params, strict=True),
+ )
+ return response
def __call__(
self,
@@ -668,37 +691,27 @@ def __call__(
"""
- http_options: List[Dict[str, str]] = [
- {
- "method": "delete",
- "uri": "/v1/projects/{project_id}/indexes/{index_id}",
- },
- ]
+ http_options = (
+ _BaseDatastoreAdminRestTransport._BaseDeleteIndex._get_http_options()
+ )
request, metadata = self._interceptor.pre_delete_index(request, metadata)
- pb_request = datastore_admin.DeleteIndexRequest.pb(request)
- transcoded_request = path_template.transcode(http_options, pb_request)
-
- uri = transcoded_request["uri"]
- method = transcoded_request["method"]
+ transcoded_request = _BaseDatastoreAdminRestTransport._BaseDeleteIndex._get_transcoded_request(
+ http_options, request
+ )
# Jsonify the query params
- query_params = json.loads(
- json_format.MessageToJson(
- transcoded_request["query_params"],
- use_integers_for_enums=True,
- )
+ query_params = _BaseDatastoreAdminRestTransport._BaseDeleteIndex._get_query_params_json(
+ transcoded_request
)
- query_params["$alt"] = "json;enum-encoding=int"
-
# Send the request
- headers = dict(metadata)
- headers["Content-Type"] = "application/json"
- response = getattr(self._session, method)(
- "{host}{uri}".format(host=self._host, uri=uri),
- timeout=timeout,
- headers=headers,
- params=rest_helpers.flatten_query_params(query_params, strict=True),
+ response = DatastoreAdminRestTransport._DeleteIndex._get_response(
+ self._host,
+ metadata,
+ query_params,
+ self._session,
+ timeout,
+ transcoded_request,
)
# In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
@@ -712,19 +725,34 @@ def __call__(
resp = self._interceptor.post_delete_index(resp)
return resp
- class _ExportEntities(DatastoreAdminRestStub):
+ class _ExportEntities(
+ _BaseDatastoreAdminRestTransport._BaseExportEntities, DatastoreAdminRestStub
+ ):
def __hash__(self):
- return hash("ExportEntities")
-
- __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
-
- @classmethod
- def _get_unset_required_fields(cls, message_dict):
- return {
- k: v
- for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
- if k not in message_dict
- }
+ return hash("DatastoreAdminRestTransport.ExportEntities")
+
+ @staticmethod
+ def _get_response(
+ host,
+ metadata,
+ query_params,
+ session,
+ timeout,
+ transcoded_request,
+ body=None,
+ ):
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+ response = getattr(session, method)(
+ "{host}{uri}".format(host=host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params, strict=True),
+ data=body,
+ )
+ return response
def __call__(
self,
@@ -754,45 +782,32 @@ def __call__(
"""
- http_options: List[Dict[str, str]] = [
- {
- "method": "post",
- "uri": "/v1/projects/{project_id}:export",
- "body": "*",
- },
- ]
+ http_options = (
+ _BaseDatastoreAdminRestTransport._BaseExportEntities._get_http_options()
+ )
request, metadata = self._interceptor.pre_export_entities(request, metadata)
- pb_request = datastore_admin.ExportEntitiesRequest.pb(request)
- transcoded_request = path_template.transcode(http_options, pb_request)
-
- # Jsonify the request body
+ transcoded_request = _BaseDatastoreAdminRestTransport._BaseExportEntities._get_transcoded_request(
+ http_options, request
+ )
- body = json_format.MessageToJson(
- transcoded_request["body"], use_integers_for_enums=True
+ body = _BaseDatastoreAdminRestTransport._BaseExportEntities._get_request_body_json(
+ transcoded_request
)
- uri = transcoded_request["uri"]
- method = transcoded_request["method"]
# Jsonify the query params
- query_params = json.loads(
- json_format.MessageToJson(
- transcoded_request["query_params"],
- use_integers_for_enums=True,
- )
+ query_params = _BaseDatastoreAdminRestTransport._BaseExportEntities._get_query_params_json(
+ transcoded_request
)
- query_params.update(self._get_unset_required_fields(query_params))
-
- query_params["$alt"] = "json;enum-encoding=int"
# Send the request
- headers = dict(metadata)
- headers["Content-Type"] = "application/json"
- response = getattr(self._session, method)(
- "{host}{uri}".format(host=self._host, uri=uri),
- timeout=timeout,
- headers=headers,
- params=rest_helpers.flatten_query_params(query_params, strict=True),
- data=body,
+ response = DatastoreAdminRestTransport._ExportEntities._get_response(
+ self._host,
+ metadata,
+ query_params,
+ self._session,
+ timeout,
+ transcoded_request,
+ body,
)
# In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
@@ -806,9 +821,33 @@ def __call__(
resp = self._interceptor.post_export_entities(resp)
return resp
- class _GetIndex(DatastoreAdminRestStub):
+ class _GetIndex(
+ _BaseDatastoreAdminRestTransport._BaseGetIndex, DatastoreAdminRestStub
+ ):
def __hash__(self):
- return hash("GetIndex")
+ return hash("DatastoreAdminRestTransport.GetIndex")
+
+ @staticmethod
+ def _get_response(
+ host,
+ metadata,
+ query_params,
+ session,
+ timeout,
+ transcoded_request,
+ body=None,
+ ):
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+ response = getattr(session, method)(
+ "{host}{uri}".format(host=host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params, strict=True),
+ )
+ return response
def __call__(
self,
@@ -835,37 +874,31 @@ def __call__(
Datastore composite index definition.
"""
- http_options: List[Dict[str, str]] = [
- {
- "method": "get",
- "uri": "/v1/projects/{project_id}/indexes/{index_id}",
- },
- ]
+ http_options = (
+ _BaseDatastoreAdminRestTransport._BaseGetIndex._get_http_options()
+ )
request, metadata = self._interceptor.pre_get_index(request, metadata)
- pb_request = datastore_admin.GetIndexRequest.pb(request)
- transcoded_request = path_template.transcode(http_options, pb_request)
-
- uri = transcoded_request["uri"]
- method = transcoded_request["method"]
+ transcoded_request = (
+ _BaseDatastoreAdminRestTransport._BaseGetIndex._get_transcoded_request(
+ http_options, request
+ )
+ )
# Jsonify the query params
- query_params = json.loads(
- json_format.MessageToJson(
- transcoded_request["query_params"],
- use_integers_for_enums=True,
+ query_params = (
+ _BaseDatastoreAdminRestTransport._BaseGetIndex._get_query_params_json(
+ transcoded_request
)
)
- query_params["$alt"] = "json;enum-encoding=int"
-
# Send the request
- headers = dict(metadata)
- headers["Content-Type"] = "application/json"
- response = getattr(self._session, method)(
- "{host}{uri}".format(host=self._host, uri=uri),
- timeout=timeout,
- headers=headers,
- params=rest_helpers.flatten_query_params(query_params, strict=True),
+ response = DatastoreAdminRestTransport._GetIndex._get_response(
+ self._host,
+ metadata,
+ query_params,
+ self._session,
+ timeout,
+ transcoded_request,
)
# In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
@@ -881,19 +914,34 @@ def __call__(
resp = self._interceptor.post_get_index(resp)
return resp
- class _ImportEntities(DatastoreAdminRestStub):
+ class _ImportEntities(
+ _BaseDatastoreAdminRestTransport._BaseImportEntities, DatastoreAdminRestStub
+ ):
def __hash__(self):
- return hash("ImportEntities")
-
- __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
-
- @classmethod
- def _get_unset_required_fields(cls, message_dict):
- return {
- k: v
- for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
- if k not in message_dict
- }
+ return hash("DatastoreAdminRestTransport.ImportEntities")
+
+ @staticmethod
+ def _get_response(
+ host,
+ metadata,
+ query_params,
+ session,
+ timeout,
+ transcoded_request,
+ body=None,
+ ):
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+ response = getattr(session, method)(
+ "{host}{uri}".format(host=host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params, strict=True),
+ data=body,
+ )
+ return response
def __call__(
self,
@@ -923,45 +971,32 @@ def __call__(
"""
- http_options: List[Dict[str, str]] = [
- {
- "method": "post",
- "uri": "/v1/projects/{project_id}:import",
- "body": "*",
- },
- ]
+ http_options = (
+ _BaseDatastoreAdminRestTransport._BaseImportEntities._get_http_options()
+ )
request, metadata = self._interceptor.pre_import_entities(request, metadata)
- pb_request = datastore_admin.ImportEntitiesRequest.pb(request)
- transcoded_request = path_template.transcode(http_options, pb_request)
-
- # Jsonify the request body
+ transcoded_request = _BaseDatastoreAdminRestTransport._BaseImportEntities._get_transcoded_request(
+ http_options, request
+ )
- body = json_format.MessageToJson(
- transcoded_request["body"], use_integers_for_enums=True
+ body = _BaseDatastoreAdminRestTransport._BaseImportEntities._get_request_body_json(
+ transcoded_request
)
- uri = transcoded_request["uri"]
- method = transcoded_request["method"]
# Jsonify the query params
- query_params = json.loads(
- json_format.MessageToJson(
- transcoded_request["query_params"],
- use_integers_for_enums=True,
- )
+ query_params = _BaseDatastoreAdminRestTransport._BaseImportEntities._get_query_params_json(
+ transcoded_request
)
- query_params.update(self._get_unset_required_fields(query_params))
-
- query_params["$alt"] = "json;enum-encoding=int"
# Send the request
- headers = dict(metadata)
- headers["Content-Type"] = "application/json"
- response = getattr(self._session, method)(
- "{host}{uri}".format(host=self._host, uri=uri),
- timeout=timeout,
- headers=headers,
- params=rest_helpers.flatten_query_params(query_params, strict=True),
- data=body,
+ response = DatastoreAdminRestTransport._ImportEntities._get_response(
+ self._host,
+ metadata,
+ query_params,
+ self._session,
+ timeout,
+ transcoded_request,
+ body,
)
# In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
@@ -975,9 +1010,33 @@ def __call__(
resp = self._interceptor.post_import_entities(resp)
return resp
- class _ListIndexes(DatastoreAdminRestStub):
+ class _ListIndexes(
+ _BaseDatastoreAdminRestTransport._BaseListIndexes, DatastoreAdminRestStub
+ ):
def __hash__(self):
- return hash("ListIndexes")
+ return hash("DatastoreAdminRestTransport.ListIndexes")
+
+ @staticmethod
+ def _get_response(
+ host,
+ metadata,
+ query_params,
+ session,
+ timeout,
+ transcoded_request,
+ body=None,
+ ):
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+ response = getattr(session, method)(
+ "{host}{uri}".format(host=host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params, strict=True),
+ )
+ return response
def __call__(
self,
@@ -1006,37 +1065,27 @@ def __call__(
"""
- http_options: List[Dict[str, str]] = [
- {
- "method": "get",
- "uri": "/v1/projects/{project_id}/indexes",
- },
- ]
+ http_options = (
+ _BaseDatastoreAdminRestTransport._BaseListIndexes._get_http_options()
+ )
request, metadata = self._interceptor.pre_list_indexes(request, metadata)
- pb_request = datastore_admin.ListIndexesRequest.pb(request)
- transcoded_request = path_template.transcode(http_options, pb_request)
-
- uri = transcoded_request["uri"]
- method = transcoded_request["method"]
+ transcoded_request = _BaseDatastoreAdminRestTransport._BaseListIndexes._get_transcoded_request(
+ http_options, request
+ )
# Jsonify the query params
- query_params = json.loads(
- json_format.MessageToJson(
- transcoded_request["query_params"],
- use_integers_for_enums=True,
- )
+ query_params = _BaseDatastoreAdminRestTransport._BaseListIndexes._get_query_params_json(
+ transcoded_request
)
- query_params["$alt"] = "json;enum-encoding=int"
-
# Send the request
- headers = dict(metadata)
- headers["Content-Type"] = "application/json"
- response = getattr(self._session, method)(
- "{host}{uri}".format(host=self._host, uri=uri),
- timeout=timeout,
- headers=headers,
- params=rest_helpers.flatten_query_params(query_params, strict=True),
+ response = DatastoreAdminRestTransport._ListIndexes._get_response(
+ self._host,
+ metadata,
+ query_params,
+ self._session,
+ timeout,
+ transcoded_request,
)
# In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
@@ -1104,7 +1153,34 @@ def list_indexes(
def cancel_operation(self):
return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore
- class _CancelOperation(DatastoreAdminRestStub):
+ class _CancelOperation(
+ _BaseDatastoreAdminRestTransport._BaseCancelOperation, DatastoreAdminRestStub
+ ):
+ def __hash__(self):
+ return hash("DatastoreAdminRestTransport.CancelOperation")
+
+ @staticmethod
+ def _get_response(
+ host,
+ metadata,
+ query_params,
+ session,
+ timeout,
+ transcoded_request,
+ body=None,
+ ):
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+ response = getattr(session, method)(
+ "{host}{uri}".format(host=host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params, strict=True),
+ )
+ return response
+
def __call__(
self,
request: operations_pb2.CancelOperationRequest,
@@ -1125,34 +1201,29 @@ def __call__(
sent along with the request as metadata.
"""
- http_options: List[Dict[str, str]] = [
- {
- "method": "post",
- "uri": "/v1/{name=projects/*/operations/*}:cancel",
- },
- ]
-
+ http_options = (
+ _BaseDatastoreAdminRestTransport._BaseCancelOperation._get_http_options()
+ )
request, metadata = self._interceptor.pre_cancel_operation(
request, metadata
)
- request_kwargs = json_format.MessageToDict(request)
- transcoded_request = path_template.transcode(http_options, **request_kwargs)
-
- uri = transcoded_request["uri"]
- method = transcoded_request["method"]
+ transcoded_request = _BaseDatastoreAdminRestTransport._BaseCancelOperation._get_transcoded_request(
+ http_options, request
+ )
# Jsonify the query params
- query_params = json.loads(json.dumps(transcoded_request["query_params"]))
+ query_params = _BaseDatastoreAdminRestTransport._BaseCancelOperation._get_query_params_json(
+ transcoded_request
+ )
# Send the request
- headers = dict(metadata)
- headers["Content-Type"] = "application/json"
-
- response = getattr(self._session, method)(
- "{host}{uri}".format(host=self._host, uri=uri),
- timeout=timeout,
- headers=headers,
- params=rest_helpers.flatten_query_params(query_params),
+ response = DatastoreAdminRestTransport._CancelOperation._get_response(
+ self._host,
+ metadata,
+ query_params,
+ self._session,
+ timeout,
+ transcoded_request,
)
# In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
@@ -1166,7 +1237,34 @@ def __call__(
def delete_operation(self):
return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore
- class _DeleteOperation(DatastoreAdminRestStub):
+ class _DeleteOperation(
+ _BaseDatastoreAdminRestTransport._BaseDeleteOperation, DatastoreAdminRestStub
+ ):
+ def __hash__(self):
+ return hash("DatastoreAdminRestTransport.DeleteOperation")
+
+ @staticmethod
+ def _get_response(
+ host,
+ metadata,
+ query_params,
+ session,
+ timeout,
+ transcoded_request,
+ body=None,
+ ):
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+ response = getattr(session, method)(
+ "{host}{uri}".format(host=host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params, strict=True),
+ )
+ return response
+
def __call__(
self,
request: operations_pb2.DeleteOperationRequest,
@@ -1187,34 +1285,29 @@ def __call__(
sent along with the request as metadata.
"""
- http_options: List[Dict[str, str]] = [
- {
- "method": "delete",
- "uri": "/v1/{name=projects/*/operations/*}",
- },
- ]
-
+ http_options = (
+ _BaseDatastoreAdminRestTransport._BaseDeleteOperation._get_http_options()
+ )
request, metadata = self._interceptor.pre_delete_operation(
request, metadata
)
- request_kwargs = json_format.MessageToDict(request)
- transcoded_request = path_template.transcode(http_options, **request_kwargs)
-
- uri = transcoded_request["uri"]
- method = transcoded_request["method"]
+ transcoded_request = _BaseDatastoreAdminRestTransport._BaseDeleteOperation._get_transcoded_request(
+ http_options, request
+ )
# Jsonify the query params
- query_params = json.loads(json.dumps(transcoded_request["query_params"]))
+ query_params = _BaseDatastoreAdminRestTransport._BaseDeleteOperation._get_query_params_json(
+ transcoded_request
+ )
# Send the request
- headers = dict(metadata)
- headers["Content-Type"] = "application/json"
-
- response = getattr(self._session, method)(
- "{host}{uri}".format(host=self._host, uri=uri),
- timeout=timeout,
- headers=headers,
- params=rest_helpers.flatten_query_params(query_params),
+ response = DatastoreAdminRestTransport._DeleteOperation._get_response(
+ self._host,
+ metadata,
+ query_params,
+ self._session,
+ timeout,
+ transcoded_request,
)
# In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
@@ -1228,7 +1321,34 @@ def __call__(
def get_operation(self):
return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore
- class _GetOperation(DatastoreAdminRestStub):
+ class _GetOperation(
+ _BaseDatastoreAdminRestTransport._BaseGetOperation, DatastoreAdminRestStub
+ ):
+ def __hash__(self):
+ return hash("DatastoreAdminRestTransport.GetOperation")
+
+ @staticmethod
+ def _get_response(
+ host,
+ metadata,
+ query_params,
+ session,
+ timeout,
+ transcoded_request,
+ body=None,
+ ):
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+ response = getattr(session, method)(
+ "{host}{uri}".format(host=host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params, strict=True),
+ )
+ return response
+
def __call__(
self,
request: operations_pb2.GetOperationRequest,
@@ -1252,32 +1372,27 @@ def __call__(
operations_pb2.Operation: Response from GetOperation method.
"""
- http_options: List[Dict[str, str]] = [
- {
- "method": "get",
- "uri": "/v1/{name=projects/*/operations/*}",
- },
- ]
-
+ http_options = (
+ _BaseDatastoreAdminRestTransport._BaseGetOperation._get_http_options()
+ )
request, metadata = self._interceptor.pre_get_operation(request, metadata)
- request_kwargs = json_format.MessageToDict(request)
- transcoded_request = path_template.transcode(http_options, **request_kwargs)
-
- uri = transcoded_request["uri"]
- method = transcoded_request["method"]
+ transcoded_request = _BaseDatastoreAdminRestTransport._BaseGetOperation._get_transcoded_request(
+ http_options, request
+ )
# Jsonify the query params
- query_params = json.loads(json.dumps(transcoded_request["query_params"]))
+ query_params = _BaseDatastoreAdminRestTransport._BaseGetOperation._get_query_params_json(
+ transcoded_request
+ )
# Send the request
- headers = dict(metadata)
- headers["Content-Type"] = "application/json"
-
- response = getattr(self._session, method)(
- "{host}{uri}".format(host=self._host, uri=uri),
- timeout=timeout,
- headers=headers,
- params=rest_helpers.flatten_query_params(query_params),
+ response = DatastoreAdminRestTransport._GetOperation._get_response(
+ self._host,
+ metadata,
+ query_params,
+ self._session,
+ timeout,
+ transcoded_request,
)
# In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
@@ -1285,8 +1400,9 @@ def __call__(
if response.status_code >= 400:
raise core_exceptions.from_http_response(response)
+ content = response.content.decode("utf-8")
resp = operations_pb2.Operation()
- resp = json_format.Parse(response.content.decode("utf-8"), resp)
+ resp = json_format.Parse(content, resp)
resp = self._interceptor.post_get_operation(resp)
return resp
@@ -1294,7 +1410,34 @@ def __call__(
def list_operations(self):
return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore
- class _ListOperations(DatastoreAdminRestStub):
+ class _ListOperations(
+ _BaseDatastoreAdminRestTransport._BaseListOperations, DatastoreAdminRestStub
+ ):
+ def __hash__(self):
+ return hash("DatastoreAdminRestTransport.ListOperations")
+
+ @staticmethod
+ def _get_response(
+ host,
+ metadata,
+ query_params,
+ session,
+ timeout,
+ transcoded_request,
+ body=None,
+ ):
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+ response = getattr(session, method)(
+ "{host}{uri}".format(host=host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params, strict=True),
+ )
+ return response
+
def __call__(
self,
request: operations_pb2.ListOperationsRequest,
@@ -1318,32 +1461,27 @@ def __call__(
operations_pb2.ListOperationsResponse: Response from ListOperations method.
"""
- http_options: List[Dict[str, str]] = [
- {
- "method": "get",
- "uri": "/v1/{name=projects/*}/operations",
- },
- ]
-
+ http_options = (
+ _BaseDatastoreAdminRestTransport._BaseListOperations._get_http_options()
+ )
request, metadata = self._interceptor.pre_list_operations(request, metadata)
- request_kwargs = json_format.MessageToDict(request)
- transcoded_request = path_template.transcode(http_options, **request_kwargs)
-
- uri = transcoded_request["uri"]
- method = transcoded_request["method"]
+ transcoded_request = _BaseDatastoreAdminRestTransport._BaseListOperations._get_transcoded_request(
+ http_options, request
+ )
# Jsonify the query params
- query_params = json.loads(json.dumps(transcoded_request["query_params"]))
+ query_params = _BaseDatastoreAdminRestTransport._BaseListOperations._get_query_params_json(
+ transcoded_request
+ )
# Send the request
- headers = dict(metadata)
- headers["Content-Type"] = "application/json"
-
- response = getattr(self._session, method)(
- "{host}{uri}".format(host=self._host, uri=uri),
- timeout=timeout,
- headers=headers,
- params=rest_helpers.flatten_query_params(query_params),
+ response = DatastoreAdminRestTransport._ListOperations._get_response(
+ self._host,
+ metadata,
+ query_params,
+ self._session,
+ timeout,
+ transcoded_request,
)
# In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
@@ -1351,8 +1489,9 @@ def __call__(
if response.status_code >= 400:
raise core_exceptions.from_http_response(response)
+ content = response.content.decode("utf-8")
resp = operations_pb2.ListOperationsResponse()
- resp = json_format.Parse(response.content.decode("utf-8"), resp)
+ resp = json_format.Parse(content, resp)
resp = self._interceptor.post_list_operations(resp)
return resp
diff --git a/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest_base.py b/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest_base.py
new file mode 100644
index 00000000..a94eece5
--- /dev/null
+++ b/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest_base.py
@@ -0,0 +1,447 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import json # type: ignore
+from google.api_core import path_template
+from google.api_core import gapic_v1
+
+from google.protobuf import json_format
+from .base import DatastoreAdminTransport, DEFAULT_CLIENT_INFO
+
+import re
+from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union
+
+
+from google.cloud.datastore_admin_v1.types import datastore_admin
+from google.cloud.datastore_admin_v1.types import index
+from google.longrunning import operations_pb2 # type: ignore
+
+
+class _BaseDatastoreAdminRestTransport(DatastoreAdminTransport):
+ """Base REST backend transport for DatastoreAdmin.
+
+ Note: This class is not meant to be used directly. Use its sync and
+ async sub-classes instead.
+
+ This class defines the same methods as the primary client, so the
+ primary client can load the underlying transport implementation
+ and call it.
+
+ It sends JSON representations of protocol buffers over HTTP/1.1
+ """
+
+ def __init__(
+ self,
+ *,
+ host: str = "datastore.googleapis.com",
+ credentials: Optional[Any] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ always_use_jwt_access: Optional[bool] = False,
+ url_scheme: str = "https",
+ api_audience: Optional[str] = None,
+ ) -> None:
+ """Instantiate the transport.
+ Args:
+ host (Optional[str]):
+ The hostname to connect to (default: 'datastore.googleapis.com').
+ credentials (Optional[Any]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you are developing
+ your own client library.
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+ be used for service account credentials.
+ url_scheme: the protocol scheme for the API endpoint. Normally
+ "https", but for testing or local servers,
+ "http" can be specified.
+ """
+ # Run the base constructor
+ maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host)
+ if maybe_url_match is None:
+ raise ValueError(
+ f"Unexpected hostname structure: {host}"
+ ) # pragma: NO COVER
+
+ url_match_items = maybe_url_match.groupdict()
+
+ host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ client_info=client_info,
+ always_use_jwt_access=always_use_jwt_access,
+ api_audience=api_audience,
+ )
+
+ class _BaseCreateIndex:
+ def __hash__(self): # pragma: NO COVER
+ return NotImplementedError("__hash__ must be implemented.")
+
+ @staticmethod
+ def _get_http_options():
+ http_options: List[Dict[str, str]] = [
+ {
+ "method": "post",
+ "uri": "/v1/projects/{project_id}/indexes",
+ "body": "index",
+ },
+ ]
+ return http_options
+
+ @staticmethod
+ def _get_transcoded_request(http_options, request):
+ pb_request = datastore_admin.CreateIndexRequest.pb(request)
+ transcoded_request = path_template.transcode(http_options, pb_request)
+ return transcoded_request
+
+ @staticmethod
+ def _get_request_body_json(transcoded_request):
+ # Jsonify the request body
+
+ body = json_format.MessageToJson(
+ transcoded_request["body"], use_integers_for_enums=True
+ )
+ return body
+
+ @staticmethod
+ def _get_query_params_json(transcoded_request):
+ query_params = json.loads(
+ json_format.MessageToJson(
+ transcoded_request["query_params"],
+ use_integers_for_enums=True,
+ )
+ )
+
+ query_params["$alt"] = "json;enum-encoding=int"
+ return query_params
+
+ class _BaseDeleteIndex:
+ def __hash__(self): # pragma: NO COVER
+ return NotImplementedError("__hash__ must be implemented.")
+
+ @staticmethod
+ def _get_http_options():
+ http_options: List[Dict[str, str]] = [
+ {
+ "method": "delete",
+ "uri": "/v1/projects/{project_id}/indexes/{index_id}",
+ },
+ ]
+ return http_options
+
+ @staticmethod
+ def _get_transcoded_request(http_options, request):
+ pb_request = datastore_admin.DeleteIndexRequest.pb(request)
+ transcoded_request = path_template.transcode(http_options, pb_request)
+ return transcoded_request
+
+ @staticmethod
+ def _get_query_params_json(transcoded_request):
+ query_params = json.loads(
+ json_format.MessageToJson(
+ transcoded_request["query_params"],
+ use_integers_for_enums=True,
+ )
+ )
+
+ query_params["$alt"] = "json;enum-encoding=int"
+ return query_params
+
+ class _BaseExportEntities:
+ def __hash__(self): # pragma: NO COVER
+ return NotImplementedError("__hash__ must be implemented.")
+
+ __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+ @classmethod
+ def _get_unset_required_fields(cls, message_dict):
+ return {
+ k: v
+ for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+ if k not in message_dict
+ }
+
+ @staticmethod
+ def _get_http_options():
+ http_options: List[Dict[str, str]] = [
+ {
+ "method": "post",
+ "uri": "/v1/projects/{project_id}:export",
+ "body": "*",
+ },
+ ]
+ return http_options
+
+ @staticmethod
+ def _get_transcoded_request(http_options, request):
+ pb_request = datastore_admin.ExportEntitiesRequest.pb(request)
+ transcoded_request = path_template.transcode(http_options, pb_request)
+ return transcoded_request
+
+ @staticmethod
+ def _get_request_body_json(transcoded_request):
+ # Jsonify the request body
+
+ body = json_format.MessageToJson(
+ transcoded_request["body"], use_integers_for_enums=True
+ )
+ return body
+
+ @staticmethod
+ def _get_query_params_json(transcoded_request):
+ query_params = json.loads(
+ json_format.MessageToJson(
+ transcoded_request["query_params"],
+ use_integers_for_enums=True,
+ )
+ )
+ query_params.update(
+ _BaseDatastoreAdminRestTransport._BaseExportEntities._get_unset_required_fields(
+ query_params
+ )
+ )
+
+ query_params["$alt"] = "json;enum-encoding=int"
+ return query_params
+
+ class _BaseGetIndex:
+ def __hash__(self): # pragma: NO COVER
+ return NotImplementedError("__hash__ must be implemented.")
+
+ @staticmethod
+ def _get_http_options():
+ http_options: List[Dict[str, str]] = [
+ {
+ "method": "get",
+ "uri": "/v1/projects/{project_id}/indexes/{index_id}",
+ },
+ ]
+ return http_options
+
+ @staticmethod
+ def _get_transcoded_request(http_options, request):
+ pb_request = datastore_admin.GetIndexRequest.pb(request)
+ transcoded_request = path_template.transcode(http_options, pb_request)
+ return transcoded_request
+
+ @staticmethod
+ def _get_query_params_json(transcoded_request):
+ query_params = json.loads(
+ json_format.MessageToJson(
+ transcoded_request["query_params"],
+ use_integers_for_enums=True,
+ )
+ )
+
+ query_params["$alt"] = "json;enum-encoding=int"
+ return query_params
+
+ class _BaseImportEntities:
+ def __hash__(self): # pragma: NO COVER
+ return NotImplementedError("__hash__ must be implemented.")
+
+ __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+ @classmethod
+ def _get_unset_required_fields(cls, message_dict):
+ return {
+ k: v
+ for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+ if k not in message_dict
+ }
+
+ @staticmethod
+ def _get_http_options():
+ http_options: List[Dict[str, str]] = [
+ {
+ "method": "post",
+ "uri": "/v1/projects/{project_id}:import",
+ "body": "*",
+ },
+ ]
+ return http_options
+
+ @staticmethod
+ def _get_transcoded_request(http_options, request):
+ pb_request = datastore_admin.ImportEntitiesRequest.pb(request)
+ transcoded_request = path_template.transcode(http_options, pb_request)
+ return transcoded_request
+
+ @staticmethod
+ def _get_request_body_json(transcoded_request):
+ # Jsonify the request body
+
+ body = json_format.MessageToJson(
+ transcoded_request["body"], use_integers_for_enums=True
+ )
+ return body
+
+ @staticmethod
+ def _get_query_params_json(transcoded_request):
+ query_params = json.loads(
+ json_format.MessageToJson(
+ transcoded_request["query_params"],
+ use_integers_for_enums=True,
+ )
+ )
+ query_params.update(
+ _BaseDatastoreAdminRestTransport._BaseImportEntities._get_unset_required_fields(
+ query_params
+ )
+ )
+
+ query_params["$alt"] = "json;enum-encoding=int"
+ return query_params
+
+ class _BaseListIndexes:
+ def __hash__(self): # pragma: NO COVER
+ return NotImplementedError("__hash__ must be implemented.")
+
+ @staticmethod
+ def _get_http_options():
+ http_options: List[Dict[str, str]] = [
+ {
+ "method": "get",
+ "uri": "/v1/projects/{project_id}/indexes",
+ },
+ ]
+ return http_options
+
+ @staticmethod
+ def _get_transcoded_request(http_options, request):
+ pb_request = datastore_admin.ListIndexesRequest.pb(request)
+ transcoded_request = path_template.transcode(http_options, pb_request)
+ return transcoded_request
+
+ @staticmethod
+ def _get_query_params_json(transcoded_request):
+ query_params = json.loads(
+ json_format.MessageToJson(
+ transcoded_request["query_params"],
+ use_integers_for_enums=True,
+ )
+ )
+
+ query_params["$alt"] = "json;enum-encoding=int"
+ return query_params
+
+ class _BaseCancelOperation:
+ def __hash__(self): # pragma: NO COVER
+ return NotImplementedError("__hash__ must be implemented.")
+
+ @staticmethod
+ def _get_http_options():
+ http_options: List[Dict[str, str]] = [
+ {
+ "method": "post",
+ "uri": "/v1/{name=projects/*/operations/*}:cancel",
+ },
+ ]
+ return http_options
+
+ @staticmethod
+ def _get_transcoded_request(http_options, request):
+ request_kwargs = json_format.MessageToDict(request)
+ transcoded_request = path_template.transcode(http_options, **request_kwargs)
+ return transcoded_request
+
+ @staticmethod
+ def _get_query_params_json(transcoded_request):
+ query_params = json.loads(json.dumps(transcoded_request["query_params"]))
+ return query_params
+
+ class _BaseDeleteOperation:
+ def __hash__(self): # pragma: NO COVER
+ return NotImplementedError("__hash__ must be implemented.")
+
+ @staticmethod
+ def _get_http_options():
+ http_options: List[Dict[str, str]] = [
+ {
+ "method": "delete",
+ "uri": "/v1/{name=projects/*/operations/*}",
+ },
+ ]
+ return http_options
+
+ @staticmethod
+ def _get_transcoded_request(http_options, request):
+ request_kwargs = json_format.MessageToDict(request)
+ transcoded_request = path_template.transcode(http_options, **request_kwargs)
+ return transcoded_request
+
+ @staticmethod
+ def _get_query_params_json(transcoded_request):
+ query_params = json.loads(json.dumps(transcoded_request["query_params"]))
+ return query_params
+
+ class _BaseGetOperation:
+ def __hash__(self): # pragma: NO COVER
+ return NotImplementedError("__hash__ must be implemented.")
+
+ @staticmethod
+ def _get_http_options():
+ http_options: List[Dict[str, str]] = [
+ {
+ "method": "get",
+ "uri": "/v1/{name=projects/*/operations/*}",
+ },
+ ]
+ return http_options
+
+ @staticmethod
+ def _get_transcoded_request(http_options, request):
+ request_kwargs = json_format.MessageToDict(request)
+ transcoded_request = path_template.transcode(http_options, **request_kwargs)
+ return transcoded_request
+
+ @staticmethod
+ def _get_query_params_json(transcoded_request):
+ query_params = json.loads(json.dumps(transcoded_request["query_params"]))
+ return query_params
+
+ class _BaseListOperations:
+ def __hash__(self): # pragma: NO COVER
+ return NotImplementedError("__hash__ must be implemented.")
+
+ @staticmethod
+ def _get_http_options():
+ http_options: List[Dict[str, str]] = [
+ {
+ "method": "get",
+ "uri": "/v1/{name=projects/*}/operations",
+ },
+ ]
+ return http_options
+
+ @staticmethod
+ def _get_transcoded_request(http_options, request):
+ request_kwargs = json_format.MessageToDict(request)
+ transcoded_request = path_template.transcode(http_options, **request_kwargs)
+ return transcoded_request
+
+ @staticmethod
+ def _get_query_params_json(transcoded_request):
+ query_params = json.loads(json.dumps(transcoded_request["query_params"]))
+ return query_params
+
+
+__all__ = ("_BaseDatastoreAdminRestTransport",)
diff --git a/google/cloud/datastore_v1/__init__.py b/google/cloud/datastore_v1/__init__.py
index b1855aff..a417fe1e 100644
--- a/google/cloud/datastore_v1/__init__.py
+++ b/google/cloud/datastore_v1/__init__.py
@@ -34,6 +34,7 @@
from .types.datastore import Mutation
from .types.datastore import MutationResult
from .types.datastore import PropertyMask
+from .types.datastore import PropertyTransform
from .types.datastore import ReadOptions
from .types.datastore import ReserveIdsRequest
from .types.datastore import ReserveIdsResponse
@@ -53,6 +54,7 @@
from .types.query import CompositeFilter
from .types.query import EntityResult
from .types.query import Filter
+from .types.query import FindNearest
from .types.query import GqlQuery
from .types.query import GqlQueryParameter
from .types.query import KindExpression
@@ -87,6 +89,7 @@
"ExplainMetrics",
"ExplainOptions",
"Filter",
+ "FindNearest",
"GqlQuery",
"GqlQueryParameter",
"Key",
@@ -102,6 +105,7 @@
"PropertyMask",
"PropertyOrder",
"PropertyReference",
+ "PropertyTransform",
"Query",
"QueryResultBatch",
"ReadOptions",
diff --git a/google/cloud/datastore_v1/gapic_version.py b/google/cloud/datastore_v1/gapic_version.py
index 9b4d43d6..efd18799 100644
--- a/google/cloud/datastore_v1/gapic_version.py
+++ b/google/cloud/datastore_v1/gapic_version.py
@@ -12,4 +12,4 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-__version__ = "2.20.1" # {x-release-please-version}
+__version__ = "2.20.2" # {x-release-please-version}
diff --git a/google/cloud/datastore_v1/services/datastore/async_client.py b/google/cloud/datastore_v1/services/datastore/async_client.py
index 2f0ba4f0..fcef7a8b 100644
--- a/google/cloud/datastore_v1/services/datastore/async_client.py
+++ b/google/cloud/datastore_v1/services/datastore/async_client.py
@@ -360,13 +360,18 @@ async def sample_lookup():
# and friendly error handling.
rpc = self._client._transport._wrapped_methods[self._client._transport.lookup]
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata(
- (("project_id", request.project_id),)
- ),
- )
+ header_params = {}
+
+ if request.project_id:
+ header_params["project_id"] = request.project_id
+
+ if request.database_id:
+ header_params["database_id"] = request.database_id
+
+ if header_params:
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata(header_params),
+ )
# Validate the universe domain.
self._client._validate_universe_domain()
@@ -446,13 +451,18 @@ async def sample_run_query():
self._client._transport.run_query
]
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata(
- (("project_id", request.project_id),)
- ),
- )
+ header_params = {}
+
+ if request.project_id:
+ header_params["project_id"] = request.project_id
+
+ if request.database_id:
+ header_params["database_id"] = request.database_id
+
+ if header_params:
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata(header_params),
+ )
# Validate the universe domain.
self._client._validate_universe_domain()
@@ -532,13 +542,18 @@ async def sample_run_aggregation_query():
self._client._transport.run_aggregation_query
]
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata(
- (("project_id", request.project_id),)
- ),
- )
+ header_params = {}
+
+ if request.project_id:
+ header_params["project_id"] = request.project_id
+
+ if request.database_id:
+ header_params["database_id"] = request.database_id
+
+ if header_params:
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata(header_params),
+ )
# Validate the universe domain.
self._client._validate_universe_domain()
@@ -640,13 +655,18 @@ async def sample_begin_transaction():
self._client._transport.begin_transaction
]
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata(
- (("project_id", request.project_id),)
- ),
- )
+ header_params = {}
+
+ if request.project_id:
+ header_params["project_id"] = request.project_id
+
+ if request.database_id:
+ header_params["database_id"] = request.database_id
+
+ if header_params:
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata(header_params),
+ )
# Validate the universe domain.
self._client._validate_universe_domain()
@@ -792,13 +812,18 @@ async def sample_commit():
# and friendly error handling.
rpc = self._client._transport._wrapped_methods[self._client._transport.commit]
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata(
- (("project_id", request.project_id),)
- ),
- )
+ header_params = {}
+
+ if request.project_id:
+ header_params["project_id"] = request.project_id
+
+ if request.database_id:
+ header_params["database_id"] = request.database_id
+
+ if header_params:
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata(header_params),
+ )
# Validate the universe domain.
self._client._validate_universe_domain()
@@ -911,13 +936,18 @@ async def sample_rollback():
# and friendly error handling.
rpc = self._client._transport._wrapped_methods[self._client._transport.rollback]
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata(
- (("project_id", request.project_id),)
- ),
- )
+ header_params = {}
+
+ if request.project_id:
+ header_params["project_id"] = request.project_id
+
+ if request.database_id:
+ header_params["database_id"] = request.database_id
+
+ if header_params:
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata(header_params),
+ )
# Validate the universe domain.
self._client._validate_universe_domain()
@@ -1032,13 +1062,18 @@ async def sample_allocate_ids():
self._client._transport.allocate_ids
]
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata(
- (("project_id", request.project_id),)
- ),
- )
+ header_params = {}
+
+ if request.project_id:
+ header_params["project_id"] = request.project_id
+
+ if request.database_id:
+ header_params["database_id"] = request.database_id
+
+ if header_params:
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata(header_params),
+ )
# Validate the universe domain.
self._client._validate_universe_domain()
@@ -1152,13 +1187,18 @@ async def sample_reserve_ids():
self._client._transport.reserve_ids
]
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata(
- (("project_id", request.project_id),)
- ),
- )
+ header_params = {}
+
+ if request.project_id:
+ header_params["project_id"] = request.project_id
+
+ if request.database_id:
+ header_params["database_id"] = request.database_id
+
+ if header_params:
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata(header_params),
+ )
# Validate the universe domain.
self._client._validate_universe_domain()
@@ -1205,11 +1245,7 @@ async def list_operations(
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method_async.wrap_method(
- self._client._transport.list_operations,
- default_timeout=None,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self.transport._wrapped_methods[self._client._transport.list_operations]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -1262,11 +1298,7 @@ async def get_operation(
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method_async.wrap_method(
- self._client._transport.get_operation,
- default_timeout=None,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self.transport._wrapped_methods[self._client._transport.get_operation]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -1323,11 +1355,7 @@ async def delete_operation(
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method_async.wrap_method(
- self._client._transport.delete_operation,
- default_timeout=None,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self.transport._wrapped_methods[self._client._transport.delete_operation]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -1380,11 +1408,7 @@ async def cancel_operation(
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method_async.wrap_method(
- self._client._transport.cancel_operation,
- default_timeout=None,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation]
# Certain fields should be provided within the metadata header;
# add these here.
diff --git a/google/cloud/datastore_v1/services/datastore/client.py b/google/cloud/datastore_v1/services/datastore/client.py
index 6717a09f..ea6fba23 100644
--- a/google/cloud/datastore_v1/services/datastore/client.py
+++ b/google/cloud/datastore_v1/services/datastore/client.py
@@ -1645,11 +1645,7 @@ def list_operations(
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method.wrap_method(
- self._transport.list_operations,
- default_timeout=None,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self._transport._wrapped_methods[self._transport.list_operations]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -1702,11 +1698,7 @@ def get_operation(
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method.wrap_method(
- self._transport.get_operation,
- default_timeout=None,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self._transport._wrapped_methods[self._transport.get_operation]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -1763,11 +1755,7 @@ def delete_operation(
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method.wrap_method(
- self._transport.delete_operation,
- default_timeout=None,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self._transport._wrapped_methods[self._transport.delete_operation]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -1820,11 +1808,7 @@ def cancel_operation(
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method.wrap_method(
- self._transport.cancel_operation,
- default_timeout=None,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self._transport._wrapped_methods[self._transport.cancel_operation]
# Certain fields should be provided within the metadata header;
# add these here.
diff --git a/google/cloud/datastore_v1/services/datastore/transports/README.rst b/google/cloud/datastore_v1/services/datastore/transports/README.rst
new file mode 100644
index 00000000..77f9e34e
--- /dev/null
+++ b/google/cloud/datastore_v1/services/datastore/transports/README.rst
@@ -0,0 +1,9 @@
+
+transport inheritance structure
+_______________________________
+
+`DatastoreTransport` is the ABC for all transports.
+- public child `DatastoreGrpcTransport` for sync gRPC transport (defined in `grpc.py`).
+- public child `DatastoreGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`).
+- private child `_BaseDatastoreRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`).
+- public child `DatastoreRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`).
diff --git a/google/cloud/datastore_v1/services/datastore/transports/base.py b/google/cloud/datastore_v1/services/datastore/transports/base.py
index db08f5b4..cb18d369 100644
--- a/google/cloud/datastore_v1/services/datastore/transports/base.py
+++ b/google/cloud/datastore_v1/services/datastore/transports/base.py
@@ -213,6 +213,26 @@ def _prep_wrapped_messages(self, client_info):
default_timeout=60.0,
client_info=client_info,
),
+ self.cancel_operation: gapic_v1.method.wrap_method(
+ self.cancel_operation,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.delete_operation: gapic_v1.method.wrap_method(
+ self.delete_operation,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.get_operation: gapic_v1.method.wrap_method(
+ self.get_operation,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.list_operations: gapic_v1.method.wrap_method(
+ self.list_operations,
+ default_timeout=None,
+ client_info=client_info,
+ ),
}
def close(self):
diff --git a/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py b/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py
index b826d7c6..4d943696 100644
--- a/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py
+++ b/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py
@@ -13,6 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
+import inspect
import warnings
from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
@@ -234,6 +235,9 @@ def __init__(
)
# Wrap messages. This must be done after self._grpc_channel exists
+ self._wrap_with_kind = (
+ "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters
+ )
self._prep_wrapped_messages(client_info)
@property
@@ -470,7 +474,7 @@ def reserve_ids(
def _prep_wrapped_messages(self, client_info):
"""Precompute the wrapped methods, overriding the base class method to use async wrappers."""
self._wrapped_methods = {
- self.lookup: gapic_v1.method_async.wrap_method(
+ self.lookup: self._wrap_method(
self.lookup,
default_retry=retries.AsyncRetry(
initial=0.1,
@@ -485,7 +489,7 @@ def _prep_wrapped_messages(self, client_info):
default_timeout=60.0,
client_info=client_info,
),
- self.run_query: gapic_v1.method_async.wrap_method(
+ self.run_query: self._wrap_method(
self.run_query,
default_retry=retries.AsyncRetry(
initial=0.1,
@@ -500,7 +504,7 @@ def _prep_wrapped_messages(self, client_info):
default_timeout=60.0,
client_info=client_info,
),
- self.run_aggregation_query: gapic_v1.method_async.wrap_method(
+ self.run_aggregation_query: self._wrap_method(
self.run_aggregation_query,
default_retry=retries.AsyncRetry(
initial=0.1,
@@ -515,27 +519,27 @@ def _prep_wrapped_messages(self, client_info):
default_timeout=60.0,
client_info=client_info,
),
- self.begin_transaction: gapic_v1.method_async.wrap_method(
+ self.begin_transaction: self._wrap_method(
self.begin_transaction,
default_timeout=60.0,
client_info=client_info,
),
- self.commit: gapic_v1.method_async.wrap_method(
+ self.commit: self._wrap_method(
self.commit,
default_timeout=60.0,
client_info=client_info,
),
- self.rollback: gapic_v1.method_async.wrap_method(
+ self.rollback: self._wrap_method(
self.rollback,
default_timeout=60.0,
client_info=client_info,
),
- self.allocate_ids: gapic_v1.method_async.wrap_method(
+ self.allocate_ids: self._wrap_method(
self.allocate_ids,
default_timeout=60.0,
client_info=client_info,
),
- self.reserve_ids: gapic_v1.method_async.wrap_method(
+ self.reserve_ids: self._wrap_method(
self.reserve_ids,
default_retry=retries.AsyncRetry(
initial=0.1,
@@ -550,11 +554,40 @@ def _prep_wrapped_messages(self, client_info):
default_timeout=60.0,
client_info=client_info,
),
+ self.cancel_operation: self._wrap_method(
+ self.cancel_operation,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.delete_operation: self._wrap_method(
+ self.delete_operation,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.get_operation: self._wrap_method(
+ self.get_operation,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.list_operations: self._wrap_method(
+ self.list_operations,
+ default_timeout=None,
+ client_info=client_info,
+ ),
}
+ def _wrap_method(self, func, *args, **kwargs):
+ if self._wrap_with_kind: # pragma: NO COVER
+ kwargs["kind"] = self.kind
+ return gapic_v1.method_async.wrap_method(func, *args, **kwargs)
+
def close(self):
return self.grpc_channel.close()
+ @property
+ def kind(self) -> str:
+ return "grpc_asyncio"
+
@property
def delete_operation(
self,
diff --git a/google/cloud/datastore_v1/services/datastore/transports/rest.py b/google/cloud/datastore_v1/services/datastore/transports/rest.py
index 24551618..abb7d45c 100644
--- a/google/cloud/datastore_v1/services/datastore/transports/rest.py
+++ b/google/cloud/datastore_v1/services/datastore/transports/rest.py
@@ -16,33 +16,32 @@
from google.auth.transport.requests import AuthorizedSession # type: ignore
import json # type: ignore
-import grpc # type: ignore
-from google.auth.transport.grpc import SslCredentials # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.api_core import exceptions as core_exceptions
from google.api_core import retry as retries
from google.api_core import rest_helpers
from google.api_core import rest_streaming
-from google.api_core import path_template
from google.api_core import gapic_v1
from google.protobuf import json_format
+
from requests import __version__ as requests_version
import dataclasses
-import re
from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union
import warnings
-try:
- OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
-except AttributeError: # pragma: NO COVER
- OptionalRetry = Union[retries.Retry, object, None] # type: ignore
-
from google.cloud.datastore_v1.types import datastore
from google.longrunning import operations_pb2 # type: ignore
-from .base import DatastoreTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO
+
+from .rest_base import _BaseDatastoreRestTransport
+from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO
+
+try:
+ OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
+except AttributeError: # pragma: NO COVER
+ OptionalRetry = Union[retries.Retry, object, None] # type: ignore
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
@@ -405,8 +404,8 @@ class DatastoreRestStub:
_interceptor: DatastoreRestInterceptor
-class DatastoreRestTransport(DatastoreTransport):
- """REST backend transport for Datastore.
+class DatastoreRestTransport(_BaseDatastoreRestTransport):
+ """REST backend synchronous transport for Datastore.
Each RPC normalizes the partition IDs of the keys in its
input entities, and always returns entities with keys with
@@ -421,7 +420,6 @@ class DatastoreRestTransport(DatastoreTransport):
and call it.
It sends JSON representations of protocol buffers over HTTP/1.1
-
"""
def __init__(
@@ -475,21 +473,12 @@ def __init__(
# TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
# TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
# credentials object
- maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host)
- if maybe_url_match is None:
- raise ValueError(
- f"Unexpected hostname structure: {host}"
- ) # pragma: NO COVER
-
- url_match_items = maybe_url_match.groupdict()
-
- host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
-
super().__init__(
host=host,
credentials=credentials,
client_info=client_info,
always_use_jwt_access=always_use_jwt_access,
+ url_scheme=url_scheme,
api_audience=api_audience,
)
self._session = AuthorizedSession(
@@ -500,19 +489,32 @@ def __init__(
self._interceptor = interceptor or DatastoreRestInterceptor()
self._prep_wrapped_messages(client_info)
- class _AllocateIds(DatastoreRestStub):
+ class _AllocateIds(_BaseDatastoreRestTransport._BaseAllocateIds, DatastoreRestStub):
def __hash__(self):
- return hash("AllocateIds")
-
- __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
-
- @classmethod
- def _get_unset_required_fields(cls, message_dict):
- return {
- k: v
- for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
- if k not in message_dict
- }
+ return hash("DatastoreRestTransport.AllocateIds")
+
+ @staticmethod
+ def _get_response(
+ host,
+ metadata,
+ query_params,
+ session,
+ timeout,
+ transcoded_request,
+ body=None,
+ ):
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+ response = getattr(session, method)(
+ "{host}{uri}".format(host=host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params, strict=True),
+ data=body,
+ )
+ return response
def __call__(
self,
@@ -541,45 +543,36 @@ def __call__(
"""
- http_options: List[Dict[str, str]] = [
- {
- "method": "post",
- "uri": "/v1/projects/{project_id}:allocateIds",
- "body": "*",
- },
- ]
+ http_options = (
+ _BaseDatastoreRestTransport._BaseAllocateIds._get_http_options()
+ )
request, metadata = self._interceptor.pre_allocate_ids(request, metadata)
- pb_request = datastore.AllocateIdsRequest.pb(request)
- transcoded_request = path_template.transcode(http_options, pb_request)
-
- # Jsonify the request body
+ transcoded_request = (
+ _BaseDatastoreRestTransport._BaseAllocateIds._get_transcoded_request(
+ http_options, request
+ )
+ )
- body = json_format.MessageToJson(
- transcoded_request["body"], use_integers_for_enums=True
+ body = _BaseDatastoreRestTransport._BaseAllocateIds._get_request_body_json(
+ transcoded_request
)
- uri = transcoded_request["uri"]
- method = transcoded_request["method"]
# Jsonify the query params
- query_params = json.loads(
- json_format.MessageToJson(
- transcoded_request["query_params"],
- use_integers_for_enums=True,
+ query_params = (
+ _BaseDatastoreRestTransport._BaseAllocateIds._get_query_params_json(
+ transcoded_request
)
)
- query_params.update(self._get_unset_required_fields(query_params))
-
- query_params["$alt"] = "json;enum-encoding=int"
# Send the request
- headers = dict(metadata)
- headers["Content-Type"] = "application/json"
- response = getattr(self._session, method)(
- "{host}{uri}".format(host=self._host, uri=uri),
- timeout=timeout,
- headers=headers,
- params=rest_helpers.flatten_query_params(query_params, strict=True),
- data=body,
+ response = DatastoreRestTransport._AllocateIds._get_response(
+ self._host,
+ metadata,
+ query_params,
+ self._session,
+ timeout,
+ transcoded_request,
+ body,
)
# In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
@@ -595,19 +588,34 @@ def __call__(
resp = self._interceptor.post_allocate_ids(resp)
return resp
- class _BeginTransaction(DatastoreRestStub):
+ class _BeginTransaction(
+ _BaseDatastoreRestTransport._BaseBeginTransaction, DatastoreRestStub
+ ):
def __hash__(self):
- return hash("BeginTransaction")
-
- __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
-
- @classmethod
- def _get_unset_required_fields(cls, message_dict):
- return {
- k: v
- for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
- if k not in message_dict
- }
+ return hash("DatastoreRestTransport.BeginTransaction")
+
+ @staticmethod
+ def _get_response(
+ host,
+ metadata,
+ query_params,
+ session,
+ timeout,
+ transcoded_request,
+ body=None,
+ ):
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+ response = getattr(session, method)(
+ "{host}{uri}".format(host=host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params, strict=True),
+ data=body,
+ )
+ return response
def __call__(
self,
@@ -636,47 +644,34 @@ def __call__(
"""
- http_options: List[Dict[str, str]] = [
- {
- "method": "post",
- "uri": "/v1/projects/{project_id}:beginTransaction",
- "body": "*",
- },
- ]
+ http_options = (
+ _BaseDatastoreRestTransport._BaseBeginTransaction._get_http_options()
+ )
request, metadata = self._interceptor.pre_begin_transaction(
request, metadata
)
- pb_request = datastore.BeginTransactionRequest.pb(request)
- transcoded_request = path_template.transcode(http_options, pb_request)
-
- # Jsonify the request body
+ transcoded_request = _BaseDatastoreRestTransport._BaseBeginTransaction._get_transcoded_request(
+ http_options, request
+ )
- body = json_format.MessageToJson(
- transcoded_request["body"], use_integers_for_enums=True
+ body = _BaseDatastoreRestTransport._BaseBeginTransaction._get_request_body_json(
+ transcoded_request
)
- uri = transcoded_request["uri"]
- method = transcoded_request["method"]
# Jsonify the query params
- query_params = json.loads(
- json_format.MessageToJson(
- transcoded_request["query_params"],
- use_integers_for_enums=True,
- )
+ query_params = _BaseDatastoreRestTransport._BaseBeginTransaction._get_query_params_json(
+ transcoded_request
)
- query_params.update(self._get_unset_required_fields(query_params))
-
- query_params["$alt"] = "json;enum-encoding=int"
# Send the request
- headers = dict(metadata)
- headers["Content-Type"] = "application/json"
- response = getattr(self._session, method)(
- "{host}{uri}".format(host=self._host, uri=uri),
- timeout=timeout,
- headers=headers,
- params=rest_helpers.flatten_query_params(query_params, strict=True),
- data=body,
+ response = DatastoreRestTransport._BeginTransaction._get_response(
+ self._host,
+ metadata,
+ query_params,
+ self._session,
+ timeout,
+ transcoded_request,
+ body,
)
# In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
@@ -692,19 +687,32 @@ def __call__(
resp = self._interceptor.post_begin_transaction(resp)
return resp
- class _Commit(DatastoreRestStub):
+ class _Commit(_BaseDatastoreRestTransport._BaseCommit, DatastoreRestStub):
def __hash__(self):
- return hash("Commit")
-
- __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
-
- @classmethod
- def _get_unset_required_fields(cls, message_dict):
- return {
- k: v
- for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
- if k not in message_dict
- }
+ return hash("DatastoreRestTransport.Commit")
+
+ @staticmethod
+ def _get_response(
+ host,
+ metadata,
+ query_params,
+ session,
+ timeout,
+ transcoded_request,
+ body=None,
+ ):
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+ response = getattr(session, method)(
+ "{host}{uri}".format(host=host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params, strict=True),
+ data=body,
+ )
+ return response
def __call__(
self,
@@ -733,45 +741,34 @@ def __call__(
"""
- http_options: List[Dict[str, str]] = [
- {
- "method": "post",
- "uri": "/v1/projects/{project_id}:commit",
- "body": "*",
- },
- ]
+ http_options = _BaseDatastoreRestTransport._BaseCommit._get_http_options()
request, metadata = self._interceptor.pre_commit(request, metadata)
- pb_request = datastore.CommitRequest.pb(request)
- transcoded_request = path_template.transcode(http_options, pb_request)
-
- # Jsonify the request body
+ transcoded_request = (
+ _BaseDatastoreRestTransport._BaseCommit._get_transcoded_request(
+ http_options, request
+ )
+ )
- body = json_format.MessageToJson(
- transcoded_request["body"], use_integers_for_enums=True
+ body = _BaseDatastoreRestTransport._BaseCommit._get_request_body_json(
+ transcoded_request
)
- uri = transcoded_request["uri"]
- method = transcoded_request["method"]
# Jsonify the query params
- query_params = json.loads(
- json_format.MessageToJson(
- transcoded_request["query_params"],
- use_integers_for_enums=True,
+ query_params = (
+ _BaseDatastoreRestTransport._BaseCommit._get_query_params_json(
+ transcoded_request
)
)
- query_params.update(self._get_unset_required_fields(query_params))
-
- query_params["$alt"] = "json;enum-encoding=int"
# Send the request
- headers = dict(metadata)
- headers["Content-Type"] = "application/json"
- response = getattr(self._session, method)(
- "{host}{uri}".format(host=self._host, uri=uri),
- timeout=timeout,
- headers=headers,
- params=rest_helpers.flatten_query_params(query_params, strict=True),
- data=body,
+ response = DatastoreRestTransport._Commit._get_response(
+ self._host,
+ metadata,
+ query_params,
+ self._session,
+ timeout,
+ transcoded_request,
+ body,
)
# In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
@@ -787,19 +784,32 @@ def __call__(
resp = self._interceptor.post_commit(resp)
return resp
- class _Lookup(DatastoreRestStub):
+ class _Lookup(_BaseDatastoreRestTransport._BaseLookup, DatastoreRestStub):
def __hash__(self):
- return hash("Lookup")
-
- __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
-
- @classmethod
- def _get_unset_required_fields(cls, message_dict):
- return {
- k: v
- for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
- if k not in message_dict
- }
+ return hash("DatastoreRestTransport.Lookup")
+
+ @staticmethod
+ def _get_response(
+ host,
+ metadata,
+ query_params,
+ session,
+ timeout,
+ transcoded_request,
+ body=None,
+ ):
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+ response = getattr(session, method)(
+ "{host}{uri}".format(host=host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params, strict=True),
+ data=body,
+ )
+ return response
def __call__(
self,
@@ -828,45 +838,34 @@ def __call__(
"""
- http_options: List[Dict[str, str]] = [
- {
- "method": "post",
- "uri": "/v1/projects/{project_id}:lookup",
- "body": "*",
- },
- ]
+ http_options = _BaseDatastoreRestTransport._BaseLookup._get_http_options()
request, metadata = self._interceptor.pre_lookup(request, metadata)
- pb_request = datastore.LookupRequest.pb(request)
- transcoded_request = path_template.transcode(http_options, pb_request)
-
- # Jsonify the request body
+ transcoded_request = (
+ _BaseDatastoreRestTransport._BaseLookup._get_transcoded_request(
+ http_options, request
+ )
+ )
- body = json_format.MessageToJson(
- transcoded_request["body"], use_integers_for_enums=True
+ body = _BaseDatastoreRestTransport._BaseLookup._get_request_body_json(
+ transcoded_request
)
- uri = transcoded_request["uri"]
- method = transcoded_request["method"]
# Jsonify the query params
- query_params = json.loads(
- json_format.MessageToJson(
- transcoded_request["query_params"],
- use_integers_for_enums=True,
+ query_params = (
+ _BaseDatastoreRestTransport._BaseLookup._get_query_params_json(
+ transcoded_request
)
)
- query_params.update(self._get_unset_required_fields(query_params))
-
- query_params["$alt"] = "json;enum-encoding=int"
# Send the request
- headers = dict(metadata)
- headers["Content-Type"] = "application/json"
- response = getattr(self._session, method)(
- "{host}{uri}".format(host=self._host, uri=uri),
- timeout=timeout,
- headers=headers,
- params=rest_helpers.flatten_query_params(query_params, strict=True),
- data=body,
+ response = DatastoreRestTransport._Lookup._get_response(
+ self._host,
+ metadata,
+ query_params,
+ self._session,
+ timeout,
+ transcoded_request,
+ body,
)
# In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
@@ -882,19 +881,32 @@ def __call__(
resp = self._interceptor.post_lookup(resp)
return resp
- class _ReserveIds(DatastoreRestStub):
+ class _ReserveIds(_BaseDatastoreRestTransport._BaseReserveIds, DatastoreRestStub):
def __hash__(self):
- return hash("ReserveIds")
-
- __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
-
- @classmethod
- def _get_unset_required_fields(cls, message_dict):
- return {
- k: v
- for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
- if k not in message_dict
- }
+ return hash("DatastoreRestTransport.ReserveIds")
+
+ @staticmethod
+ def _get_response(
+ host,
+ metadata,
+ query_params,
+ session,
+ timeout,
+ transcoded_request,
+ body=None,
+ ):
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+ response = getattr(session, method)(
+ "{host}{uri}".format(host=host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params, strict=True),
+ data=body,
+ )
+ return response
def __call__(
self,
@@ -923,45 +935,36 @@ def __call__(
"""
- http_options: List[Dict[str, str]] = [
- {
- "method": "post",
- "uri": "/v1/projects/{project_id}:reserveIds",
- "body": "*",
- },
- ]
+ http_options = (
+ _BaseDatastoreRestTransport._BaseReserveIds._get_http_options()
+ )
request, metadata = self._interceptor.pre_reserve_ids(request, metadata)
- pb_request = datastore.ReserveIdsRequest.pb(request)
- transcoded_request = path_template.transcode(http_options, pb_request)
-
- # Jsonify the request body
+ transcoded_request = (
+ _BaseDatastoreRestTransport._BaseReserveIds._get_transcoded_request(
+ http_options, request
+ )
+ )
- body = json_format.MessageToJson(
- transcoded_request["body"], use_integers_for_enums=True
+ body = _BaseDatastoreRestTransport._BaseReserveIds._get_request_body_json(
+ transcoded_request
)
- uri = transcoded_request["uri"]
- method = transcoded_request["method"]
# Jsonify the query params
- query_params = json.loads(
- json_format.MessageToJson(
- transcoded_request["query_params"],
- use_integers_for_enums=True,
+ query_params = (
+ _BaseDatastoreRestTransport._BaseReserveIds._get_query_params_json(
+ transcoded_request
)
)
- query_params.update(self._get_unset_required_fields(query_params))
-
- query_params["$alt"] = "json;enum-encoding=int"
# Send the request
- headers = dict(metadata)
- headers["Content-Type"] = "application/json"
- response = getattr(self._session, method)(
- "{host}{uri}".format(host=self._host, uri=uri),
- timeout=timeout,
- headers=headers,
- params=rest_helpers.flatten_query_params(query_params, strict=True),
- data=body,
+ response = DatastoreRestTransport._ReserveIds._get_response(
+ self._host,
+ metadata,
+ query_params,
+ self._session,
+ timeout,
+ transcoded_request,
+ body,
)
# In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
@@ -977,19 +980,32 @@ def __call__(
resp = self._interceptor.post_reserve_ids(resp)
return resp
- class _Rollback(DatastoreRestStub):
+ class _Rollback(_BaseDatastoreRestTransport._BaseRollback, DatastoreRestStub):
def __hash__(self):
- return hash("Rollback")
-
- __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
-
- @classmethod
- def _get_unset_required_fields(cls, message_dict):
- return {
- k: v
- for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
- if k not in message_dict
- }
+ return hash("DatastoreRestTransport.Rollback")
+
+ @staticmethod
+ def _get_response(
+ host,
+ metadata,
+ query_params,
+ session,
+ timeout,
+ transcoded_request,
+ body=None,
+ ):
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+ response = getattr(session, method)(
+ "{host}{uri}".format(host=host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params, strict=True),
+ data=body,
+ )
+ return response
def __call__(
self,
@@ -1019,45 +1035,34 @@ def __call__(
"""
- http_options: List[Dict[str, str]] = [
- {
- "method": "post",
- "uri": "/v1/projects/{project_id}:rollback",
- "body": "*",
- },
- ]
+ http_options = _BaseDatastoreRestTransport._BaseRollback._get_http_options()
request, metadata = self._interceptor.pre_rollback(request, metadata)
- pb_request = datastore.RollbackRequest.pb(request)
- transcoded_request = path_template.transcode(http_options, pb_request)
-
- # Jsonify the request body
+ transcoded_request = (
+ _BaseDatastoreRestTransport._BaseRollback._get_transcoded_request(
+ http_options, request
+ )
+ )
- body = json_format.MessageToJson(
- transcoded_request["body"], use_integers_for_enums=True
+ body = _BaseDatastoreRestTransport._BaseRollback._get_request_body_json(
+ transcoded_request
)
- uri = transcoded_request["uri"]
- method = transcoded_request["method"]
# Jsonify the query params
- query_params = json.loads(
- json_format.MessageToJson(
- transcoded_request["query_params"],
- use_integers_for_enums=True,
+ query_params = (
+ _BaseDatastoreRestTransport._BaseRollback._get_query_params_json(
+ transcoded_request
)
)
- query_params.update(self._get_unset_required_fields(query_params))
-
- query_params["$alt"] = "json;enum-encoding=int"
# Send the request
- headers = dict(metadata)
- headers["Content-Type"] = "application/json"
- response = getattr(self._session, method)(
- "{host}{uri}".format(host=self._host, uri=uri),
- timeout=timeout,
- headers=headers,
- params=rest_helpers.flatten_query_params(query_params, strict=True),
- data=body,
+ response = DatastoreRestTransport._Rollback._get_response(
+ self._host,
+ metadata,
+ query_params,
+ self._session,
+ timeout,
+ transcoded_request,
+ body,
)
# In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
@@ -1073,19 +1078,34 @@ def __call__(
resp = self._interceptor.post_rollback(resp)
return resp
- class _RunAggregationQuery(DatastoreRestStub):
+ class _RunAggregationQuery(
+ _BaseDatastoreRestTransport._BaseRunAggregationQuery, DatastoreRestStub
+ ):
def __hash__(self):
- return hash("RunAggregationQuery")
-
- __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
-
- @classmethod
- def _get_unset_required_fields(cls, message_dict):
- return {
- k: v
- for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
- if k not in message_dict
- }
+ return hash("DatastoreRestTransport.RunAggregationQuery")
+
+ @staticmethod
+ def _get_response(
+ host,
+ metadata,
+ query_params,
+ session,
+ timeout,
+ transcoded_request,
+ body=None,
+ ):
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+ response = getattr(session, method)(
+ "{host}{uri}".format(host=host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params, strict=True),
+ data=body,
+ )
+ return response
def __call__(
self,
@@ -1114,47 +1134,34 @@ def __call__(
"""
- http_options: List[Dict[str, str]] = [
- {
- "method": "post",
- "uri": "/v1/projects/{project_id}:runAggregationQuery",
- "body": "*",
- },
- ]
+ http_options = (
+ _BaseDatastoreRestTransport._BaseRunAggregationQuery._get_http_options()
+ )
request, metadata = self._interceptor.pre_run_aggregation_query(
request, metadata
)
- pb_request = datastore.RunAggregationQueryRequest.pb(request)
- transcoded_request = path_template.transcode(http_options, pb_request)
-
- # Jsonify the request body
+ transcoded_request = _BaseDatastoreRestTransport._BaseRunAggregationQuery._get_transcoded_request(
+ http_options, request
+ )
- body = json_format.MessageToJson(
- transcoded_request["body"], use_integers_for_enums=True
+ body = _BaseDatastoreRestTransport._BaseRunAggregationQuery._get_request_body_json(
+ transcoded_request
)
- uri = transcoded_request["uri"]
- method = transcoded_request["method"]
# Jsonify the query params
- query_params = json.loads(
- json_format.MessageToJson(
- transcoded_request["query_params"],
- use_integers_for_enums=True,
- )
+ query_params = _BaseDatastoreRestTransport._BaseRunAggregationQuery._get_query_params_json(
+ transcoded_request
)
- query_params.update(self._get_unset_required_fields(query_params))
-
- query_params["$alt"] = "json;enum-encoding=int"
# Send the request
- headers = dict(metadata)
- headers["Content-Type"] = "application/json"
- response = getattr(self._session, method)(
- "{host}{uri}".format(host=self._host, uri=uri),
- timeout=timeout,
- headers=headers,
- params=rest_helpers.flatten_query_params(query_params, strict=True),
- data=body,
+ response = DatastoreRestTransport._RunAggregationQuery._get_response(
+ self._host,
+ metadata,
+ query_params,
+ self._session,
+ timeout,
+ transcoded_request,
+ body,
)
# In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
@@ -1170,19 +1177,32 @@ def __call__(
resp = self._interceptor.post_run_aggregation_query(resp)
return resp
- class _RunQuery(DatastoreRestStub):
+ class _RunQuery(_BaseDatastoreRestTransport._BaseRunQuery, DatastoreRestStub):
def __hash__(self):
- return hash("RunQuery")
-
- __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
-
- @classmethod
- def _get_unset_required_fields(cls, message_dict):
- return {
- k: v
- for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
- if k not in message_dict
- }
+ return hash("DatastoreRestTransport.RunQuery")
+
+ @staticmethod
+ def _get_response(
+ host,
+ metadata,
+ query_params,
+ session,
+ timeout,
+ transcoded_request,
+ body=None,
+ ):
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+ response = getattr(session, method)(
+ "{host}{uri}".format(host=host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params, strict=True),
+ data=body,
+ )
+ return response
def __call__(
self,
@@ -1211,45 +1231,34 @@ def __call__(
"""
- http_options: List[Dict[str, str]] = [
- {
- "method": "post",
- "uri": "/v1/projects/{project_id}:runQuery",
- "body": "*",
- },
- ]
+ http_options = _BaseDatastoreRestTransport._BaseRunQuery._get_http_options()
request, metadata = self._interceptor.pre_run_query(request, metadata)
- pb_request = datastore.RunQueryRequest.pb(request)
- transcoded_request = path_template.transcode(http_options, pb_request)
-
- # Jsonify the request body
+ transcoded_request = (
+ _BaseDatastoreRestTransport._BaseRunQuery._get_transcoded_request(
+ http_options, request
+ )
+ )
- body = json_format.MessageToJson(
- transcoded_request["body"], use_integers_for_enums=True
+ body = _BaseDatastoreRestTransport._BaseRunQuery._get_request_body_json(
+ transcoded_request
)
- uri = transcoded_request["uri"]
- method = transcoded_request["method"]
# Jsonify the query params
- query_params = json.loads(
- json_format.MessageToJson(
- transcoded_request["query_params"],
- use_integers_for_enums=True,
+ query_params = (
+ _BaseDatastoreRestTransport._BaseRunQuery._get_query_params_json(
+ transcoded_request
)
)
- query_params.update(self._get_unset_required_fields(query_params))
-
- query_params["$alt"] = "json;enum-encoding=int"
# Send the request
- headers = dict(metadata)
- headers["Content-Type"] = "application/json"
- response = getattr(self._session, method)(
- "{host}{uri}".format(host=self._host, uri=uri),
- timeout=timeout,
- headers=headers,
- params=rest_helpers.flatten_query_params(query_params, strict=True),
- data=body,
+ response = DatastoreRestTransport._RunQuery._get_response(
+ self._host,
+ metadata,
+ query_params,
+ self._session,
+ timeout,
+ transcoded_request,
+ body,
)
# In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
@@ -1333,7 +1342,34 @@ def run_query(
def cancel_operation(self):
return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore
- class _CancelOperation(DatastoreRestStub):
+ class _CancelOperation(
+ _BaseDatastoreRestTransport._BaseCancelOperation, DatastoreRestStub
+ ):
+ def __hash__(self):
+ return hash("DatastoreRestTransport.CancelOperation")
+
+ @staticmethod
+ def _get_response(
+ host,
+ metadata,
+ query_params,
+ session,
+ timeout,
+ transcoded_request,
+ body=None,
+ ):
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+ response = getattr(session, method)(
+ "{host}{uri}".format(host=host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params, strict=True),
+ )
+ return response
+
def __call__(
self,
request: operations_pb2.CancelOperationRequest,
@@ -1354,34 +1390,31 @@ def __call__(
sent along with the request as metadata.
"""
- http_options: List[Dict[str, str]] = [
- {
- "method": "post",
- "uri": "/v1/{name=projects/*/operations/*}:cancel",
- },
- ]
-
+ http_options = (
+ _BaseDatastoreRestTransport._BaseCancelOperation._get_http_options()
+ )
request, metadata = self._interceptor.pre_cancel_operation(
request, metadata
)
- request_kwargs = json_format.MessageToDict(request)
- transcoded_request = path_template.transcode(http_options, **request_kwargs)
-
- uri = transcoded_request["uri"]
- method = transcoded_request["method"]
+ transcoded_request = _BaseDatastoreRestTransport._BaseCancelOperation._get_transcoded_request(
+ http_options, request
+ )
# Jsonify the query params
- query_params = json.loads(json.dumps(transcoded_request["query_params"]))
+ query_params = (
+ _BaseDatastoreRestTransport._BaseCancelOperation._get_query_params_json(
+ transcoded_request
+ )
+ )
# Send the request
- headers = dict(metadata)
- headers["Content-Type"] = "application/json"
-
- response = getattr(self._session, method)(
- "{host}{uri}".format(host=self._host, uri=uri),
- timeout=timeout,
- headers=headers,
- params=rest_helpers.flatten_query_params(query_params),
+ response = DatastoreRestTransport._CancelOperation._get_response(
+ self._host,
+ metadata,
+ query_params,
+ self._session,
+ timeout,
+ transcoded_request,
)
# In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
@@ -1395,7 +1428,34 @@ def __call__(
def delete_operation(self):
return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore
- class _DeleteOperation(DatastoreRestStub):
+ class _DeleteOperation(
+ _BaseDatastoreRestTransport._BaseDeleteOperation, DatastoreRestStub
+ ):
+ def __hash__(self):
+ return hash("DatastoreRestTransport.DeleteOperation")
+
+ @staticmethod
+ def _get_response(
+ host,
+ metadata,
+ query_params,
+ session,
+ timeout,
+ transcoded_request,
+ body=None,
+ ):
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+ response = getattr(session, method)(
+ "{host}{uri}".format(host=host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params, strict=True),
+ )
+ return response
+
def __call__(
self,
request: operations_pb2.DeleteOperationRequest,
@@ -1416,34 +1476,31 @@ def __call__(
sent along with the request as metadata.
"""
- http_options: List[Dict[str, str]] = [
- {
- "method": "delete",
- "uri": "/v1/{name=projects/*/operations/*}",
- },
- ]
-
+ http_options = (
+ _BaseDatastoreRestTransport._BaseDeleteOperation._get_http_options()
+ )
request, metadata = self._interceptor.pre_delete_operation(
request, metadata
)
- request_kwargs = json_format.MessageToDict(request)
- transcoded_request = path_template.transcode(http_options, **request_kwargs)
-
- uri = transcoded_request["uri"]
- method = transcoded_request["method"]
+ transcoded_request = _BaseDatastoreRestTransport._BaseDeleteOperation._get_transcoded_request(
+ http_options, request
+ )
# Jsonify the query params
- query_params = json.loads(json.dumps(transcoded_request["query_params"]))
+ query_params = (
+ _BaseDatastoreRestTransport._BaseDeleteOperation._get_query_params_json(
+ transcoded_request
+ )
+ )
# Send the request
- headers = dict(metadata)
- headers["Content-Type"] = "application/json"
-
- response = getattr(self._session, method)(
- "{host}{uri}".format(host=self._host, uri=uri),
- timeout=timeout,
- headers=headers,
- params=rest_helpers.flatten_query_params(query_params),
+ response = DatastoreRestTransport._DeleteOperation._get_response(
+ self._host,
+ metadata,
+ query_params,
+ self._session,
+ timeout,
+ transcoded_request,
)
# In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
@@ -1457,7 +1514,34 @@ def __call__(
def get_operation(self):
return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore
- class _GetOperation(DatastoreRestStub):
+ class _GetOperation(
+ _BaseDatastoreRestTransport._BaseGetOperation, DatastoreRestStub
+ ):
+ def __hash__(self):
+ return hash("DatastoreRestTransport.GetOperation")
+
+ @staticmethod
+ def _get_response(
+ host,
+ metadata,
+ query_params,
+ session,
+ timeout,
+ transcoded_request,
+ body=None,
+ ):
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+ response = getattr(session, method)(
+ "{host}{uri}".format(host=host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params, strict=True),
+ )
+ return response
+
def __call__(
self,
request: operations_pb2.GetOperationRequest,
@@ -1481,32 +1565,31 @@ def __call__(
operations_pb2.Operation: Response from GetOperation method.
"""
- http_options: List[Dict[str, str]] = [
- {
- "method": "get",
- "uri": "/v1/{name=projects/*/operations/*}",
- },
- ]
-
+ http_options = (
+ _BaseDatastoreRestTransport._BaseGetOperation._get_http_options()
+ )
request, metadata = self._interceptor.pre_get_operation(request, metadata)
- request_kwargs = json_format.MessageToDict(request)
- transcoded_request = path_template.transcode(http_options, **request_kwargs)
-
- uri = transcoded_request["uri"]
- method = transcoded_request["method"]
+ transcoded_request = (
+ _BaseDatastoreRestTransport._BaseGetOperation._get_transcoded_request(
+ http_options, request
+ )
+ )
# Jsonify the query params
- query_params = json.loads(json.dumps(transcoded_request["query_params"]))
+ query_params = (
+ _BaseDatastoreRestTransport._BaseGetOperation._get_query_params_json(
+ transcoded_request
+ )
+ )
# Send the request
- headers = dict(metadata)
- headers["Content-Type"] = "application/json"
-
- response = getattr(self._session, method)(
- "{host}{uri}".format(host=self._host, uri=uri),
- timeout=timeout,
- headers=headers,
- params=rest_helpers.flatten_query_params(query_params),
+ response = DatastoreRestTransport._GetOperation._get_response(
+ self._host,
+ metadata,
+ query_params,
+ self._session,
+ timeout,
+ transcoded_request,
)
# In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
@@ -1514,8 +1597,9 @@ def __call__(
if response.status_code >= 400:
raise core_exceptions.from_http_response(response)
+ content = response.content.decode("utf-8")
resp = operations_pb2.Operation()
- resp = json_format.Parse(response.content.decode("utf-8"), resp)
+ resp = json_format.Parse(content, resp)
resp = self._interceptor.post_get_operation(resp)
return resp
@@ -1523,7 +1607,34 @@ def __call__(
def list_operations(self):
return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore
- class _ListOperations(DatastoreRestStub):
+ class _ListOperations(
+ _BaseDatastoreRestTransport._BaseListOperations, DatastoreRestStub
+ ):
+ def __hash__(self):
+ return hash("DatastoreRestTransport.ListOperations")
+
+ @staticmethod
+ def _get_response(
+ host,
+ metadata,
+ query_params,
+ session,
+ timeout,
+ transcoded_request,
+ body=None,
+ ):
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+ response = getattr(session, method)(
+ "{host}{uri}".format(host=host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params, strict=True),
+ )
+ return response
+
def __call__(
self,
request: operations_pb2.ListOperationsRequest,
@@ -1547,32 +1658,31 @@ def __call__(
operations_pb2.ListOperationsResponse: Response from ListOperations method.
"""
- http_options: List[Dict[str, str]] = [
- {
- "method": "get",
- "uri": "/v1/{name=projects/*}/operations",
- },
- ]
-
+ http_options = (
+ _BaseDatastoreRestTransport._BaseListOperations._get_http_options()
+ )
request, metadata = self._interceptor.pre_list_operations(request, metadata)
- request_kwargs = json_format.MessageToDict(request)
- transcoded_request = path_template.transcode(http_options, **request_kwargs)
-
- uri = transcoded_request["uri"]
- method = transcoded_request["method"]
+ transcoded_request = (
+ _BaseDatastoreRestTransport._BaseListOperations._get_transcoded_request(
+ http_options, request
+ )
+ )
# Jsonify the query params
- query_params = json.loads(json.dumps(transcoded_request["query_params"]))
+ query_params = (
+ _BaseDatastoreRestTransport._BaseListOperations._get_query_params_json(
+ transcoded_request
+ )
+ )
# Send the request
- headers = dict(metadata)
- headers["Content-Type"] = "application/json"
-
- response = getattr(self._session, method)(
- "{host}{uri}".format(host=self._host, uri=uri),
- timeout=timeout,
- headers=headers,
- params=rest_helpers.flatten_query_params(query_params),
+ response = DatastoreRestTransport._ListOperations._get_response(
+ self._host,
+ metadata,
+ query_params,
+ self._session,
+ timeout,
+ transcoded_request,
)
# In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
@@ -1580,8 +1690,9 @@ def __call__(
if response.status_code >= 400:
raise core_exceptions.from_http_response(response)
+ content = response.content.decode("utf-8")
resp = operations_pb2.ListOperationsResponse()
- resp = json_format.Parse(response.content.decode("utf-8"), resp)
+ resp = json_format.Parse(content, resp)
resp = self._interceptor.post_list_operations(resp)
return resp
diff --git a/google/cloud/datastore_v1/services/datastore/transports/rest_base.py b/google/cloud/datastore_v1/services/datastore/transports/rest_base.py
new file mode 100644
index 00000000..c8d5c675
--- /dev/null
+++ b/google/cloud/datastore_v1/services/datastore/transports/rest_base.py
@@ -0,0 +1,650 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import json # type: ignore
+from google.api_core import path_template
+from google.api_core import gapic_v1
+
+from google.protobuf import json_format
+from .base import DatastoreTransport, DEFAULT_CLIENT_INFO
+
+import re
+from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union
+
+
+from google.cloud.datastore_v1.types import datastore
+from google.longrunning import operations_pb2 # type: ignore
+
+
+class _BaseDatastoreRestTransport(DatastoreTransport):
+ """Base REST backend transport for Datastore.
+
+ Note: This class is not meant to be used directly. Use its sync and
+ async sub-classes instead.
+
+ This class defines the same methods as the primary client, so the
+ primary client can load the underlying transport implementation
+ and call it.
+
+ It sends JSON representations of protocol buffers over HTTP/1.1
+ """
+
+ def __init__(
+ self,
+ *,
+ host: str = "datastore.googleapis.com",
+ credentials: Optional[Any] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ always_use_jwt_access: Optional[bool] = False,
+ url_scheme: str = "https",
+ api_audience: Optional[str] = None,
+ ) -> None:
+ """Instantiate the transport.
+ Args:
+ host (Optional[str]):
+ The hostname to connect to (default: 'datastore.googleapis.com').
+ credentials (Optional[Any]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you are developing
+ your own client library.
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+ be used for service account credentials.
+ url_scheme: the protocol scheme for the API endpoint. Normally
+ "https", but for testing or local servers,
+ "http" can be specified.
+ """
+ # Run the base constructor
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+ if maybe_url_match is None:
+ raise ValueError(
+ f"Unexpected hostname structure: {host}"
+ ) # pragma: NO COVER
+
+ url_match_items = maybe_url_match.groupdict()
+
+ host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ client_info=client_info,
+ always_use_jwt_access=always_use_jwt_access,
+ api_audience=api_audience,
+ )
+
+ class _BaseAllocateIds:
+ def __hash__(self): # pragma: NO COVER
+ return NotImplementedError("__hash__ must be implemented.")
+
+ __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+ @classmethod
+ def _get_unset_required_fields(cls, message_dict):
+ return {
+ k: v
+ for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+ if k not in message_dict
+ }
+
+ @staticmethod
+ def _get_http_options():
+ http_options: List[Dict[str, str]] = [
+ {
+ "method": "post",
+ "uri": "/v1/projects/{project_id}:allocateIds",
+ "body": "*",
+ },
+ ]
+ return http_options
+
+ @staticmethod
+ def _get_transcoded_request(http_options, request):
+ pb_request = datastore.AllocateIdsRequest.pb(request)
+ transcoded_request = path_template.transcode(http_options, pb_request)
+ return transcoded_request
+
+ @staticmethod
+ def _get_request_body_json(transcoded_request):
+ # Jsonify the request body
+
+ body = json_format.MessageToJson(
+ transcoded_request["body"], use_integers_for_enums=True
+ )
+ return body
+
+ @staticmethod
+ def _get_query_params_json(transcoded_request):
+ query_params = json.loads(
+ json_format.MessageToJson(
+ transcoded_request["query_params"],
+ use_integers_for_enums=True,
+ )
+ )
+ query_params.update(
+ _BaseDatastoreRestTransport._BaseAllocateIds._get_unset_required_fields(
+ query_params
+ )
+ )
+
+ query_params["$alt"] = "json;enum-encoding=int"
+ return query_params
+
+ class _BaseBeginTransaction:
+ def __hash__(self): # pragma: NO COVER
+ return NotImplementedError("__hash__ must be implemented.")
+
+ __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+ @classmethod
+ def _get_unset_required_fields(cls, message_dict):
+ return {
+ k: v
+ for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+ if k not in message_dict
+ }
+
+ @staticmethod
+ def _get_http_options():
+ http_options: List[Dict[str, str]] = [
+ {
+ "method": "post",
+ "uri": "/v1/projects/{project_id}:beginTransaction",
+ "body": "*",
+ },
+ ]
+ return http_options
+
+ @staticmethod
+ def _get_transcoded_request(http_options, request):
+ pb_request = datastore.BeginTransactionRequest.pb(request)
+ transcoded_request = path_template.transcode(http_options, pb_request)
+ return transcoded_request
+
+ @staticmethod
+ def _get_request_body_json(transcoded_request):
+ # Jsonify the request body
+
+ body = json_format.MessageToJson(
+ transcoded_request["body"], use_integers_for_enums=True
+ )
+ return body
+
+ @staticmethod
+ def _get_query_params_json(transcoded_request):
+ query_params = json.loads(
+ json_format.MessageToJson(
+ transcoded_request["query_params"],
+ use_integers_for_enums=True,
+ )
+ )
+ query_params.update(
+ _BaseDatastoreRestTransport._BaseBeginTransaction._get_unset_required_fields(
+ query_params
+ )
+ )
+
+ query_params["$alt"] = "json;enum-encoding=int"
+ return query_params
+
+ class _BaseCommit:
+ def __hash__(self): # pragma: NO COVER
+ return NotImplementedError("__hash__ must be implemented.")
+
+ __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+ @classmethod
+ def _get_unset_required_fields(cls, message_dict):
+ return {
+ k: v
+ for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+ if k not in message_dict
+ }
+
+ @staticmethod
+ def _get_http_options():
+ http_options: List[Dict[str, str]] = [
+ {
+ "method": "post",
+ "uri": "/v1/projects/{project_id}:commit",
+ "body": "*",
+ },
+ ]
+ return http_options
+
+ @staticmethod
+ def _get_transcoded_request(http_options, request):
+ pb_request = datastore.CommitRequest.pb(request)
+ transcoded_request = path_template.transcode(http_options, pb_request)
+ return transcoded_request
+
+ @staticmethod
+ def _get_request_body_json(transcoded_request):
+ # Jsonify the request body
+
+ body = json_format.MessageToJson(
+ transcoded_request["body"], use_integers_for_enums=True
+ )
+ return body
+
+ @staticmethod
+ def _get_query_params_json(transcoded_request):
+ query_params = json.loads(
+ json_format.MessageToJson(
+ transcoded_request["query_params"],
+ use_integers_for_enums=True,
+ )
+ )
+ query_params.update(
+ _BaseDatastoreRestTransport._BaseCommit._get_unset_required_fields(
+ query_params
+ )
+ )
+
+ query_params["$alt"] = "json;enum-encoding=int"
+ return query_params
+
+ class _BaseLookup:
+ def __hash__(self): # pragma: NO COVER
+ return NotImplementedError("__hash__ must be implemented.")
+
+ __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+ @classmethod
+ def _get_unset_required_fields(cls, message_dict):
+ return {
+ k: v
+ for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+ if k not in message_dict
+ }
+
+ @staticmethod
+ def _get_http_options():
+ http_options: List[Dict[str, str]] = [
+ {
+ "method": "post",
+ "uri": "/v1/projects/{project_id}:lookup",
+ "body": "*",
+ },
+ ]
+ return http_options
+
+ @staticmethod
+ def _get_transcoded_request(http_options, request):
+ pb_request = datastore.LookupRequest.pb(request)
+ transcoded_request = path_template.transcode(http_options, pb_request)
+ return transcoded_request
+
+ @staticmethod
+ def _get_request_body_json(transcoded_request):
+ # Jsonify the request body
+
+ body = json_format.MessageToJson(
+ transcoded_request["body"], use_integers_for_enums=True
+ )
+ return body
+
+ @staticmethod
+ def _get_query_params_json(transcoded_request):
+ query_params = json.loads(
+ json_format.MessageToJson(
+ transcoded_request["query_params"],
+ use_integers_for_enums=True,
+ )
+ )
+ query_params.update(
+ _BaseDatastoreRestTransport._BaseLookup._get_unset_required_fields(
+ query_params
+ )
+ )
+
+ query_params["$alt"] = "json;enum-encoding=int"
+ return query_params
+
+ class _BaseReserveIds:
+ def __hash__(self): # pragma: NO COVER
+ return NotImplementedError("__hash__ must be implemented.")
+
+ __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+ @classmethod
+ def _get_unset_required_fields(cls, message_dict):
+ return {
+ k: v
+ for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+ if k not in message_dict
+ }
+
+ @staticmethod
+ def _get_http_options():
+ http_options: List[Dict[str, str]] = [
+ {
+ "method": "post",
+ "uri": "/v1/projects/{project_id}:reserveIds",
+ "body": "*",
+ },
+ ]
+ return http_options
+
+ @staticmethod
+ def _get_transcoded_request(http_options, request):
+ pb_request = datastore.ReserveIdsRequest.pb(request)
+ transcoded_request = path_template.transcode(http_options, pb_request)
+ return transcoded_request
+
+ @staticmethod
+ def _get_request_body_json(transcoded_request):
+ # Jsonify the request body
+
+ body = json_format.MessageToJson(
+ transcoded_request["body"], use_integers_for_enums=True
+ )
+ return body
+
+ @staticmethod
+ def _get_query_params_json(transcoded_request):
+ query_params = json.loads(
+ json_format.MessageToJson(
+ transcoded_request["query_params"],
+ use_integers_for_enums=True,
+ )
+ )
+ query_params.update(
+ _BaseDatastoreRestTransport._BaseReserveIds._get_unset_required_fields(
+ query_params
+ )
+ )
+
+ query_params["$alt"] = "json;enum-encoding=int"
+ return query_params
+
+ class _BaseRollback:
+ def __hash__(self): # pragma: NO COVER
+ return NotImplementedError("__hash__ must be implemented.")
+
+ __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+ @classmethod
+ def _get_unset_required_fields(cls, message_dict):
+ return {
+ k: v
+ for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+ if k not in message_dict
+ }
+
+ @staticmethod
+ def _get_http_options():
+ http_options: List[Dict[str, str]] = [
+ {
+ "method": "post",
+ "uri": "/v1/projects/{project_id}:rollback",
+ "body": "*",
+ },
+ ]
+ return http_options
+
+ @staticmethod
+ def _get_transcoded_request(http_options, request):
+ pb_request = datastore.RollbackRequest.pb(request)
+ transcoded_request = path_template.transcode(http_options, pb_request)
+ return transcoded_request
+
+ @staticmethod
+ def _get_request_body_json(transcoded_request):
+ # Jsonify the request body
+
+ body = json_format.MessageToJson(
+ transcoded_request["body"], use_integers_for_enums=True
+ )
+ return body
+
+ @staticmethod
+ def _get_query_params_json(transcoded_request):
+ query_params = json.loads(
+ json_format.MessageToJson(
+ transcoded_request["query_params"],
+ use_integers_for_enums=True,
+ )
+ )
+ query_params.update(
+ _BaseDatastoreRestTransport._BaseRollback._get_unset_required_fields(
+ query_params
+ )
+ )
+
+ query_params["$alt"] = "json;enum-encoding=int"
+ return query_params
+
+ class _BaseRunAggregationQuery:
+ def __hash__(self): # pragma: NO COVER
+ return NotImplementedError("__hash__ must be implemented.")
+
+ __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+ @classmethod
+ def _get_unset_required_fields(cls, message_dict):
+ return {
+ k: v
+ for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+ if k not in message_dict
+ }
+
+ @staticmethod
+ def _get_http_options():
+ http_options: List[Dict[str, str]] = [
+ {
+ "method": "post",
+ "uri": "/v1/projects/{project_id}:runAggregationQuery",
+ "body": "*",
+ },
+ ]
+ return http_options
+
+ @staticmethod
+ def _get_transcoded_request(http_options, request):
+ pb_request = datastore.RunAggregationQueryRequest.pb(request)
+ transcoded_request = path_template.transcode(http_options, pb_request)
+ return transcoded_request
+
+ @staticmethod
+ def _get_request_body_json(transcoded_request):
+ # Jsonify the request body
+
+ body = json_format.MessageToJson(
+ transcoded_request["body"], use_integers_for_enums=True
+ )
+ return body
+
+ @staticmethod
+ def _get_query_params_json(transcoded_request):
+ query_params = json.loads(
+ json_format.MessageToJson(
+ transcoded_request["query_params"],
+ use_integers_for_enums=True,
+ )
+ )
+ query_params.update(
+ _BaseDatastoreRestTransport._BaseRunAggregationQuery._get_unset_required_fields(
+ query_params
+ )
+ )
+
+ query_params["$alt"] = "json;enum-encoding=int"
+ return query_params
+
+ class _BaseRunQuery:
+ def __hash__(self): # pragma: NO COVER
+ return NotImplementedError("__hash__ must be implemented.")
+
+ __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+ @classmethod
+ def _get_unset_required_fields(cls, message_dict):
+ return {
+ k: v
+ for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+ if k not in message_dict
+ }
+
+ @staticmethod
+ def _get_http_options():
+ http_options: List[Dict[str, str]] = [
+ {
+ "method": "post",
+ "uri": "/v1/projects/{project_id}:runQuery",
+ "body": "*",
+ },
+ ]
+ return http_options
+
+ @staticmethod
+ def _get_transcoded_request(http_options, request):
+ pb_request = datastore.RunQueryRequest.pb(request)
+ transcoded_request = path_template.transcode(http_options, pb_request)
+ return transcoded_request
+
+ @staticmethod
+ def _get_request_body_json(transcoded_request):
+ # Jsonify the request body
+
+ body = json_format.MessageToJson(
+ transcoded_request["body"], use_integers_for_enums=True
+ )
+ return body
+
+ @staticmethod
+ def _get_query_params_json(transcoded_request):
+ query_params = json.loads(
+ json_format.MessageToJson(
+ transcoded_request["query_params"],
+ use_integers_for_enums=True,
+ )
+ )
+ query_params.update(
+ _BaseDatastoreRestTransport._BaseRunQuery._get_unset_required_fields(
+ query_params
+ )
+ )
+
+ query_params["$alt"] = "json;enum-encoding=int"
+ return query_params
+
+ class _BaseCancelOperation:
+ def __hash__(self): # pragma: NO COVER
+ return NotImplementedError("__hash__ must be implemented.")
+
+ @staticmethod
+ def _get_http_options():
+ http_options: List[Dict[str, str]] = [
+ {
+ "method": "post",
+ "uri": "/v1/{name=projects/*/operations/*}:cancel",
+ },
+ ]
+ return http_options
+
+ @staticmethod
+ def _get_transcoded_request(http_options, request):
+ request_kwargs = json_format.MessageToDict(request)
+ transcoded_request = path_template.transcode(http_options, **request_kwargs)
+ return transcoded_request
+
+ @staticmethod
+ def _get_query_params_json(transcoded_request):
+ query_params = json.loads(json.dumps(transcoded_request["query_params"]))
+ return query_params
+
+ class _BaseDeleteOperation:
+ def __hash__(self): # pragma: NO COVER
+ return NotImplementedError("__hash__ must be implemented.")
+
+ @staticmethod
+ def _get_http_options():
+ http_options: List[Dict[str, str]] = [
+ {
+ "method": "delete",
+ "uri": "/v1/{name=projects/*/operations/*}",
+ },
+ ]
+ return http_options
+
+ @staticmethod
+ def _get_transcoded_request(http_options, request):
+ request_kwargs = json_format.MessageToDict(request)
+ transcoded_request = path_template.transcode(http_options, **request_kwargs)
+ return transcoded_request
+
+ @staticmethod
+ def _get_query_params_json(transcoded_request):
+ query_params = json.loads(json.dumps(transcoded_request["query_params"]))
+ return query_params
+
+ class _BaseGetOperation:
+ def __hash__(self): # pragma: NO COVER
+ return NotImplementedError("__hash__ must be implemented.")
+
+ @staticmethod
+ def _get_http_options():
+ http_options: List[Dict[str, str]] = [
+ {
+ "method": "get",
+ "uri": "/v1/{name=projects/*/operations/*}",
+ },
+ ]
+ return http_options
+
+ @staticmethod
+ def _get_transcoded_request(http_options, request):
+ request_kwargs = json_format.MessageToDict(request)
+ transcoded_request = path_template.transcode(http_options, **request_kwargs)
+ return transcoded_request
+
+ @staticmethod
+ def _get_query_params_json(transcoded_request):
+ query_params = json.loads(json.dumps(transcoded_request["query_params"]))
+ return query_params
+
+ class _BaseListOperations:
+ def __hash__(self): # pragma: NO COVER
+ return NotImplementedError("__hash__ must be implemented.")
+
+ @staticmethod
+ def _get_http_options():
+ http_options: List[Dict[str, str]] = [
+ {
+ "method": "get",
+ "uri": "/v1/{name=projects/*}/operations",
+ },
+ ]
+ return http_options
+
+ @staticmethod
+ def _get_transcoded_request(http_options, request):
+ request_kwargs = json_format.MessageToDict(request)
+ transcoded_request = path_template.transcode(http_options, **request_kwargs)
+ return transcoded_request
+
+ @staticmethod
+ def _get_query_params_json(transcoded_request):
+ query_params = json.loads(json.dumps(transcoded_request["query_params"]))
+ return query_params
+
+
+__all__ = ("_BaseDatastoreRestTransport",)
diff --git a/google/cloud/datastore_v1/types/__init__.py b/google/cloud/datastore_v1/types/__init__.py
index 3ae809b4..0efe33ff 100644
--- a/google/cloud/datastore_v1/types/__init__.py
+++ b/google/cloud/datastore_v1/types/__init__.py
@@ -29,6 +29,7 @@
Mutation,
MutationResult,
PropertyMask,
+ PropertyTransform,
ReadOptions,
ReserveIdsRequest,
ReserveIdsResponse,
@@ -52,6 +53,7 @@
CompositeFilter,
EntityResult,
Filter,
+ FindNearest,
GqlQuery,
GqlQueryParameter,
KindExpression,
@@ -83,6 +85,7 @@
"Mutation",
"MutationResult",
"PropertyMask",
+ "PropertyTransform",
"ReadOptions",
"ReserveIdsRequest",
"ReserveIdsResponse",
@@ -102,6 +105,7 @@
"CompositeFilter",
"EntityResult",
"Filter",
+ "FindNearest",
"GqlQuery",
"GqlQueryParameter",
"KindExpression",
diff --git a/google/cloud/datastore_v1/types/datastore.py b/google/cloud/datastore_v1/types/datastore.py
index 11974c3d..281866f5 100644
--- a/google/cloud/datastore_v1/types/datastore.py
+++ b/google/cloud/datastore_v1/types/datastore.py
@@ -46,6 +46,7 @@
"ReserveIdsRequest",
"ReserveIdsResponse",
"Mutation",
+ "PropertyTransform",
"MutationResult",
"PropertyMask",
"ReadOptions",
@@ -796,6 +797,10 @@ class Mutation(proto.Message):
mutation conflicts.
This field is a member of `oneof`_ ``conflict_detection_strategy``.
+ conflict_resolution_strategy (google.cloud.datastore_v1.types.Mutation.ConflictResolutionStrategy):
+ The strategy to use when a conflict is detected. Defaults to
+ ``SERVER_VALUE``. If this is set, then
+ ``conflict_detection_strategy`` must also be set.
property_mask (google.cloud.datastore_v1.types.PropertyMask):
The properties to write in this mutation. None of the
properties in the mask may have a reserved name, except for
@@ -804,8 +809,31 @@ class Mutation(proto.Message):
If the entity already exists, only properties referenced in
the mask are updated, others are left untouched. Properties
referenced in the mask but not in the entity are deleted.
+ property_transforms (MutableSequence[google.cloud.datastore_v1.types.PropertyTransform]):
+ Optional. The transforms to perform on the entity.
+
+ This field can be set only when the operation is ``insert``,
+ ``update``, or ``upsert``. If present, the transforms are be
+ applied to the entity regardless of the property mask, in
+ order, after the operation.
"""
+ class ConflictResolutionStrategy(proto.Enum):
+ r"""The possible ways to resolve a conflict detected in a
+ mutation.
+
+ Values:
+ STRATEGY_UNSPECIFIED (0):
+ Unspecified. Defaults to ``SERVER_VALUE``.
+ SERVER_VALUE (1):
+ The server entity is kept.
+ FAIL (3):
+ The whole commit request fails.
+ """
+ STRATEGY_UNSPECIFIED = 0
+ SERVER_VALUE = 1
+ FAIL = 3
+
insert: entity.Entity = proto.Field(
proto.MESSAGE,
number=4,
@@ -841,11 +869,199 @@ class Mutation(proto.Message):
oneof="conflict_detection_strategy",
message=timestamp_pb2.Timestamp,
)
+ conflict_resolution_strategy: ConflictResolutionStrategy = proto.Field(
+ proto.ENUM,
+ number=10,
+ enum=ConflictResolutionStrategy,
+ )
property_mask: "PropertyMask" = proto.Field(
proto.MESSAGE,
number=9,
message="PropertyMask",
)
+ property_transforms: MutableSequence["PropertyTransform"] = proto.RepeatedField(
+ proto.MESSAGE,
+ number=12,
+ message="PropertyTransform",
+ )
+
+
+class PropertyTransform(proto.Message):
+ r"""A transformation of an entity property.
+
+ This message has `oneof`_ fields (mutually exclusive fields).
+ For each oneof, at most one member field can be set at the same time.
+ Setting any member of the oneof automatically clears all other
+ members.
+
+ .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+ Attributes:
+ property (str):
+ Optional. The name of the property.
+
+ Property paths (a list of property names separated by dots
+ (``.``)) may be used to refer to properties inside entity
+ values. For example ``foo.bar`` means the property ``bar``
+ inside the entity property ``foo``.
+
+            If a property name contains a dot ``.`` or a backslash
+ ``\``, then that name must be escaped.
+ set_to_server_value (google.cloud.datastore_v1.types.PropertyTransform.ServerValue):
+ Sets the property to the given server value.
+
+ This field is a member of `oneof`_ ``transform_type``.
+ increment (google.cloud.datastore_v1.types.Value):
+ Adds the given value to the property's
+ current value.
+ This must be an integer or a double value.
+ If the property is not an integer or double, or
+ if the property does not yet exist, the
+ transformation will set the property to the
+ given value. If either of the given value or the
+ current property value are doubles, both values
+ will be interpreted as doubles. Double
+ arithmetic and representation of double values
+ follows IEEE 754 semantics. If there is
+ positive/negative integer overflow, the property
+ is resolved to the largest magnitude
+ positive/negative integer.
+
+ This field is a member of `oneof`_ ``transform_type``.
+ maximum (google.cloud.datastore_v1.types.Value):
+ Sets the property to the maximum of its
+ current value and the given value.
+
+ This must be an integer or a double value.
+ If the property is not an integer or double, or
+ if the property does not yet exist, the
+ transformation will set the property to the
+ given value. If a maximum operation is applied
+ where the property and the input value are of
+ mixed types (that is - one is an integer and one
+ is a double) the property takes on the type of
+ the larger operand. If the operands are
+ equivalent (e.g. 3 and 3.0), the property does
+ not change. 0, 0.0, and -0.0 are all zero. The
+ maximum of a zero stored value and zero input
+ value is always the stored value.
+ The maximum of any numeric value x and NaN is
+ NaN.
+
+ This field is a member of `oneof`_ ``transform_type``.
+ minimum (google.cloud.datastore_v1.types.Value):
+ Sets the property to the minimum of its
+ current value and the given value.
+
+ This must be an integer or a double value.
+ If the property is not an integer or double, or
+ if the property does not yet exist, the
+ transformation will set the property to the
+ input value. If a minimum operation is applied
+ where the property and the input value are of
+ mixed types (that is - one is an integer and one
+ is a double) the property takes on the type of
+ the smaller operand. If the operands are
+ equivalent (e.g. 3 and 3.0), the property does
+ not change. 0, 0.0, and -0.0 are all zero. The
+ minimum of a zero stored value and zero input
+ value is always the stored value. The minimum of
+ any numeric value x and NaN is NaN.
+
+ This field is a member of `oneof`_ ``transform_type``.
+ append_missing_elements (google.cloud.datastore_v1.types.ArrayValue):
+ Appends the given elements in order if they
+ are not already present in the current property
+ value. If the property is not an array, or if
+ the property does not yet exist, it is first set
+ to the empty array.
+
+ Equivalent numbers of different types (e.g. 3L
+ and 3.0) are considered equal when checking if a
+ value is missing. NaN is equal to NaN, and the
+ null value is equal to the null value. If the
+ input contains multiple equivalent values, only
+ the first will be considered.
+
+ The corresponding transform result will be the
+ null value.
+
+ This field is a member of `oneof`_ ``transform_type``.
+ remove_all_from_array (google.cloud.datastore_v1.types.ArrayValue):
+ Removes all of the given elements from the
+ array in the property. If the property is not an
+ array, or if the property does not yet exist, it
+ is set to the empty array.
+
+ Equivalent numbers of different types (e.g. 3L
+ and 3.0) are considered equal when deciding
+ whether an element should be removed. NaN is
+ equal to NaN, and the null value is equal to the
+ null value. This will remove all equivalent
+ values if there are duplicates.
+
+ The corresponding transform result will be the
+ null value.
+
+ This field is a member of `oneof`_ ``transform_type``.
+ """
+
+ class ServerValue(proto.Enum):
+ r"""A value that is calculated by the server.
+
+ Values:
+ SERVER_VALUE_UNSPECIFIED (0):
+ Unspecified. This value must not be used.
+ REQUEST_TIME (1):
+ The time at which the server processed the
+ request, with millisecond precision. If used on
+ multiple properties (same or different entities)
+ in a transaction, all the properties will get
+ the same server timestamp.
+ """
+ SERVER_VALUE_UNSPECIFIED = 0
+ REQUEST_TIME = 1
+
+ property: str = proto.Field(
+ proto.STRING,
+ number=1,
+ )
+ set_to_server_value: ServerValue = proto.Field(
+ proto.ENUM,
+ number=2,
+ oneof="transform_type",
+ enum=ServerValue,
+ )
+ increment: entity.Value = proto.Field(
+ proto.MESSAGE,
+ number=3,
+ oneof="transform_type",
+ message=entity.Value,
+ )
+ maximum: entity.Value = proto.Field(
+ proto.MESSAGE,
+ number=4,
+ oneof="transform_type",
+ message=entity.Value,
+ )
+ minimum: entity.Value = proto.Field(
+ proto.MESSAGE,
+ number=5,
+ oneof="transform_type",
+ message=entity.Value,
+ )
+ append_missing_elements: entity.ArrayValue = proto.Field(
+ proto.MESSAGE,
+ number=6,
+ oneof="transform_type",
+ message=entity.ArrayValue,
+ )
+ remove_all_from_array: entity.ArrayValue = proto.Field(
+ proto.MESSAGE,
+ number=7,
+ oneof="transform_type",
+ message=entity.ArrayValue,
+ )
class MutationResult(proto.Message):
@@ -878,6 +1094,10 @@ class MutationResult(proto.Message):
Whether a conflict was detected for this
mutation. Always false when a conflict detection
strategy field is not set in the mutation.
+ transform_results (MutableSequence[google.cloud.datastore_v1.types.Value]):
+ The results of applying each
+ [PropertyTransform][google.datastore.v1.PropertyTransform],
+ in the same order of the request.
"""
key: entity.Key = proto.Field(
@@ -903,6 +1123,11 @@ class MutationResult(proto.Message):
proto.BOOL,
number=5,
)
+ transform_results: MutableSequence[entity.Value] = proto.RepeatedField(
+ proto.MESSAGE,
+ number=8,
+ message=entity.Value,
+ )
class PropertyMask(proto.Message):
diff --git a/google/cloud/datastore_v1/types/query.py b/google/cloud/datastore_v1/types/query.py
index 2ce1000f..1f8679cc 100644
--- a/google/cloud/datastore_v1/types/query.py
+++ b/google/cloud/datastore_v1/types/query.py
@@ -37,6 +37,7 @@
"Filter",
"CompositeFilter",
"PropertyFilter",
+ "FindNearest",
"GqlQuery",
"GqlQueryParameter",
"QueryResultBatch",
@@ -132,6 +133,16 @@ class ResultType(proto.Enum):
class Query(proto.Message):
r"""A query for entities.
+ The query stages are executed in the following order:
+
+ 1. kind
+ 2. filter
+ 3. projection
+ 4. order + start_cursor + end_cursor
+ 5. offset
+ 6. limit
+ 7. find_nearest
+
Attributes:
projection (MutableSequence[google.cloud.datastore_v1.types.Projection]):
The projection to return. Defaults to
@@ -175,6 +186,13 @@ class Query(proto.Message):
Applies after all other constraints. Optional.
Unspecified is interpreted as no limit.
Must be >= 0 if specified.
+ find_nearest (google.cloud.datastore_v1.types.FindNearest):
+ Optional. A potential Nearest Neighbors
+ Search.
+ Applies after all other filters and ordering.
+
+ Finds the closest vector embeddings to the given
+ query vector.
"""
projection: MutableSequence["Projection"] = proto.RepeatedField(
@@ -219,6 +237,11 @@ class Query(proto.Message):
number=12,
message=wrappers_pb2.Int32Value,
)
+ find_nearest: "FindNearest" = proto.Field(
+ proto.MESSAGE,
+ number=13,
+ message="FindNearest",
+ )
class AggregationQuery(proto.Message):
@@ -712,6 +735,110 @@ class Operator(proto.Enum):
)
+class FindNearest(proto.Message):
+ r"""Nearest Neighbors search config. The ordering provided by
+ FindNearest supersedes the order_by stage. If multiple documents
+ have the same vector distance, the returned document order is not
+ guaranteed to be stable between queries.
+
+ Attributes:
+ vector_property (google.cloud.datastore_v1.types.PropertyReference):
+ Required. An indexed vector property to search upon. Only
+ documents which contain vectors whose dimensionality match
+ the query_vector can be returned.
+ query_vector (google.cloud.datastore_v1.types.Value):
+ Required. The query vector that we are
+ searching on. Must be a vector of no more than
+ 2048 dimensions.
+ distance_measure (google.cloud.datastore_v1.types.FindNearest.DistanceMeasure):
+ Required. The Distance Measure to use,
+ required.
+ limit (google.protobuf.wrappers_pb2.Int32Value):
+ Required. The number of nearest neighbors to
+ return. Must be a positive integer of no more
+ than 100.
+ distance_result_property (str):
+ Optional. Optional name of the field to output the result of
+ the vector distance calculation. Must conform to [entity
+ property][google.datastore.v1.Entity.properties]
+ limitations.
+ distance_threshold (google.protobuf.wrappers_pb2.DoubleValue):
+ Optional. Option to specify a threshold for which no less
+ similar documents will be returned. The behavior of the
+ specified ``distance_measure`` will affect the meaning of
+ the distance threshold. Since DOT_PRODUCT distances increase
+ when the vectors are more similar, the comparison is
+ inverted.
+
+ For EUCLIDEAN, COSINE: WHERE distance <= distance_threshold
+ For DOT_PRODUCT: WHERE distance >= distance_threshold
+ """
+
+ class DistanceMeasure(proto.Enum):
+ r"""The distance measure to use when comparing vectors.
+
+ Values:
+ DISTANCE_MEASURE_UNSPECIFIED (0):
+ Should not be set.
+ EUCLIDEAN (1):
+ Measures the EUCLIDEAN distance between the vectors. See
+                `Euclidean <https://en.wikipedia.org/wiki/Euclidean_distance>`__
+ to learn more. The resulting distance decreases the more
+ similar two vectors are.
+ COSINE (2):
+ COSINE distance compares vectors based on the angle between
+ them, which allows you to measure similarity that isn't
+ based on the vectors magnitude. We recommend using
+ DOT_PRODUCT with unit normalized vectors instead of COSINE
+ distance, which is mathematically equivalent with better
+ performance. See `Cosine
+                Similarity <https://en.wikipedia.org/wiki/Cosine_similarity>`__
+ to learn more about COSINE similarity and COSINE distance.
+ The resulting COSINE distance decreases the more similar two
+ vectors are.
+ DOT_PRODUCT (3):
+ Similar to cosine but is affected by the magnitude of the
+ vectors. See `Dot
+                Product <https://en.wikipedia.org/wiki/Dot_product>`__ to
+ learn more. The resulting distance increases the more
+ similar two vectors are.
+ """
+ DISTANCE_MEASURE_UNSPECIFIED = 0
+ EUCLIDEAN = 1
+ COSINE = 2
+ DOT_PRODUCT = 3
+
+ vector_property: "PropertyReference" = proto.Field(
+ proto.MESSAGE,
+ number=1,
+ message="PropertyReference",
+ )
+ query_vector: gd_entity.Value = proto.Field(
+ proto.MESSAGE,
+ number=2,
+ message=gd_entity.Value,
+ )
+ distance_measure: DistanceMeasure = proto.Field(
+ proto.ENUM,
+ number=3,
+ enum=DistanceMeasure,
+ )
+ limit: wrappers_pb2.Int32Value = proto.Field(
+ proto.MESSAGE,
+ number=4,
+ message=wrappers_pb2.Int32Value,
+ )
+ distance_result_property: str = proto.Field(
+ proto.STRING,
+ number=5,
+ )
+ distance_threshold: wrappers_pb2.DoubleValue = proto.Field(
+ proto.MESSAGE,
+ number=6,
+ message=wrappers_pb2.DoubleValue,
+ )
+
+
class GqlQuery(proto.Message):
r"""A `GQL
query `__.
diff --git a/noxfile.py b/noxfile.py
index 4a08c70f..7fcab220 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -34,7 +34,15 @@
DEFAULT_PYTHON_VERSION = "3.8"
-UNIT_TEST_PYTHON_VERSIONS: List[str] = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"]
+UNIT_TEST_PYTHON_VERSIONS: List[str] = [
+ "3.7",
+ "3.8",
+ "3.9",
+ "3.10",
+ "3.11",
+ "3.12",
+ "3.13",
+]
UNIT_TEST_STANDARD_DEPENDENCIES = [
"mock",
"asyncmock",
@@ -66,7 +74,6 @@
CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()
-# 'docfx' is excluded since it only needs to run in 'docs-presubmit'
nox.options.sessions = [
"unit",
"system",
@@ -77,6 +84,7 @@
"blacken",
"docs",
"doctests",
+ "docfx",
"format",
]
@@ -189,7 +197,7 @@ def install_unittest_dependencies(session, *constraints):
def unit(session, protobuf_implementation):
# Install all test dependencies, then install this package in-place.
- if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"):
+ if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"):
session.skip("cpp implementation is not supported in python 3.11+")
constraints_path = str(
@@ -407,7 +415,7 @@ def docfx(session):
)
-@nox.session(python="3.12")
+@nox.session(python="3.13")
@nox.parametrize(
"protobuf_implementation",
["python", "upb", "cpp"],
@@ -415,7 +423,7 @@ def docfx(session):
def prerelease_deps(session, protobuf_implementation):
"""Run all tests with prerelease versions of dependencies installed."""
- if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"):
+ if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"):
session.skip("cpp implementation is not supported in python 3.11+")
# Install all dependencies
diff --git a/samples/snippets/noxfile.py b/samples/snippets/noxfile.py
index 483b5590..a169b5b5 100644
--- a/samples/snippets/noxfile.py
+++ b/samples/snippets/noxfile.py
@@ -89,7 +89,7 @@ def get_pytest_env_vars() -> Dict[str, str]:
# DO NOT EDIT - automatically generated.
# All versions used to test samples.
-ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"]
+ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"]
# Any default versions that should be ignored.
IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"]
diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt
index 5bccacc5..8816d485 100644
--- a/samples/snippets/requirements.txt
+++ b/samples/snippets/requirements.txt
@@ -1 +1 @@
-google-cloud-datastore==2.19.0
\ No newline at end of file
+google-cloud-datastore==2.20.0
\ No newline at end of file
diff --git a/samples/snippets/schedule-export/noxfile.py b/samples/snippets/schedule-export/noxfile.py
index 483b5590..a169b5b5 100644
--- a/samples/snippets/schedule-export/noxfile.py
+++ b/samples/snippets/schedule-export/noxfile.py
@@ -89,7 +89,7 @@ def get_pytest_env_vars() -> Dict[str, str]:
# DO NOT EDIT - automatically generated.
# All versions used to test samples.
-ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"]
+ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"]
# Any default versions that should be ignored.
IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"]
diff --git a/samples/snippets/schedule-export/requirements.txt b/samples/snippets/schedule-export/requirements.txt
index b748abdc..7f503957 100644
--- a/samples/snippets/schedule-export/requirements.txt
+++ b/samples/snippets/schedule-export/requirements.txt
@@ -1 +1 @@
-google-cloud-datastore==2.19.0
+google-cloud-datastore==2.20.0
diff --git a/testing/constraints-3.13.txt b/testing/constraints-3.13.txt
new file mode 100644
index 00000000..e69de29b
diff --git a/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py b/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py
index 6501a012..f880423c 100644
--- a/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py
+++ b/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py
@@ -24,7 +24,7 @@
import grpc
from grpc.experimental import aio
-from collections.abc import Iterable
+from collections.abc import Iterable, AsyncIterable
from google.protobuf import json_format
import json
import math
@@ -37,6 +37,13 @@
from requests.sessions import Session
from google.protobuf import json_format
+try:
+ from google.auth.aio import credentials as ga_credentials_async
+
+ HAS_GOOGLE_AUTH_AIO = True
+except ImportError: # pragma: NO COVER
+ HAS_GOOGLE_AUTH_AIO = False
+
from google.api_core import client_options
from google.api_core import exceptions as core_exceptions
from google.api_core import future
@@ -66,10 +73,24 @@
import google.auth
+async def mock_async_gen(data, chunk_size=1):
+ for i in range(0, len(data)): # pragma: NO COVER
+ chunk = data[i : i + chunk_size]
+ yield chunk.encode("utf-8")
+
+
def client_cert_source_callback():
return b"cert bytes", b"key bytes"
+# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded.
+# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107.
+def async_anonymous_credentials():
+ if HAS_GOOGLE_AUTH_AIO:
+ return ga_credentials_async.AnonymousCredentials()
+ return ga_credentials.AnonymousCredentials()
+
+
# If default endpoint is localhost, then default mtls endpoint will be the same.
# This method modifies the default endpoint so the client can produce a different
# mtls endpoint for endpoint testing purposes.
@@ -1167,25 +1188,6 @@ def test_export_entities(request_type, transport: str = "grpc"):
assert isinstance(response, future.Future)
-def test_export_entities_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
- client = DatastoreAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc",
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.export_entities), "__call__") as call:
- call.return_value.name = (
- "foo" # operation_request.operation in compute client(s) expect a string.
- )
- client.export_entities()
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == datastore_admin.ExportEntitiesRequest()
-
-
def test_export_entities_non_empty_request_with_auto_populated_field():
# This test is a coverage failsafe to make sure that UUID4 fields are
# automatically populated, according to AIP-4235, with non-empty requests.
@@ -1256,27 +1258,6 @@ def test_export_entities_use_cached_wrapped_rpc():
assert mock_rpc.call_count == 2
-@pytest.mark.asyncio
-async def test_export_entities_empty_call_async():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
- client = DatastoreAdminAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc_asyncio",
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.export_entities), "__call__") as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- operations_pb2.Operation(name="operations/spam")
- )
- response = await client.export_entities()
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == datastore_admin.ExportEntitiesRequest()
-
-
@pytest.mark.asyncio
async def test_export_entities_async_use_cached_wrapped_rpc(
transport: str = "grpc_asyncio",
@@ -1285,7 +1266,7 @@ async def test_export_entities_async_use_cached_wrapped_rpc(
# instead of constructing them on each call
with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
client = DatastoreAdminAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -1329,7 +1310,7 @@ async def test_export_entities_async(
transport: str = "grpc_asyncio", request_type=datastore_admin.ExportEntitiesRequest
):
client = DatastoreAdminAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -1392,7 +1373,7 @@ def test_export_entities_field_headers():
@pytest.mark.asyncio
async def test_export_entities_field_headers_async():
client = DatastoreAdminAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
@@ -1477,7 +1458,7 @@ def test_export_entities_flattened_error():
@pytest.mark.asyncio
async def test_export_entities_flattened_async():
client = DatastoreAdminAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -1518,7 +1499,7 @@ async def test_export_entities_flattened_async():
@pytest.mark.asyncio
async def test_export_entities_flattened_error_async():
client = DatastoreAdminAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Attempting to call a method with both a request object and flattened
@@ -1566,25 +1547,6 @@ def test_import_entities(request_type, transport: str = "grpc"):
assert isinstance(response, future.Future)
-def test_import_entities_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
- client = DatastoreAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc",
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.import_entities), "__call__") as call:
- call.return_value.name = (
- "foo" # operation_request.operation in compute client(s) expect a string.
- )
- client.import_entities()
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == datastore_admin.ImportEntitiesRequest()
-
-
def test_import_entities_non_empty_request_with_auto_populated_field():
# This test is a coverage failsafe to make sure that UUID4 fields are
# automatically populated, according to AIP-4235, with non-empty requests.
@@ -1655,27 +1617,6 @@ def test_import_entities_use_cached_wrapped_rpc():
assert mock_rpc.call_count == 2
-@pytest.mark.asyncio
-async def test_import_entities_empty_call_async():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
- client = DatastoreAdminAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc_asyncio",
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.import_entities), "__call__") as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- operations_pb2.Operation(name="operations/spam")
- )
- response = await client.import_entities()
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == datastore_admin.ImportEntitiesRequest()
-
-
@pytest.mark.asyncio
async def test_import_entities_async_use_cached_wrapped_rpc(
transport: str = "grpc_asyncio",
@@ -1684,7 +1625,7 @@ async def test_import_entities_async_use_cached_wrapped_rpc(
# instead of constructing them on each call
with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
client = DatastoreAdminAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -1728,7 +1669,7 @@ async def test_import_entities_async(
transport: str = "grpc_asyncio", request_type=datastore_admin.ImportEntitiesRequest
):
client = DatastoreAdminAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -1791,7 +1732,7 @@ def test_import_entities_field_headers():
@pytest.mark.asyncio
async def test_import_entities_field_headers_async():
client = DatastoreAdminAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
@@ -1876,7 +1817,7 @@ def test_import_entities_flattened_error():
@pytest.mark.asyncio
async def test_import_entities_flattened_async():
client = DatastoreAdminAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -1917,7 +1858,7 @@ async def test_import_entities_flattened_async():
@pytest.mark.asyncio
async def test_import_entities_flattened_error_async():
client = DatastoreAdminAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Attempting to call a method with both a request object and flattened
@@ -1965,25 +1906,6 @@ def test_create_index(request_type, transport: str = "grpc"):
assert isinstance(response, future.Future)
-def test_create_index_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
- client = DatastoreAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc",
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.create_index), "__call__") as call:
- call.return_value.name = (
- "foo" # operation_request.operation in compute client(s) expect a string.
- )
- client.create_index()
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == datastore_admin.CreateIndexRequest()
-
-
def test_create_index_non_empty_request_with_auto_populated_field():
# This test is a coverage failsafe to make sure that UUID4 fields are
# automatically populated, according to AIP-4235, with non-empty requests.
@@ -2052,27 +1974,6 @@ def test_create_index_use_cached_wrapped_rpc():
assert mock_rpc.call_count == 2
-@pytest.mark.asyncio
-async def test_create_index_empty_call_async():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
- client = DatastoreAdminAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc_asyncio",
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.create_index), "__call__") as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- operations_pb2.Operation(name="operations/spam")
- )
- response = await client.create_index()
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == datastore_admin.CreateIndexRequest()
-
-
@pytest.mark.asyncio
async def test_create_index_async_use_cached_wrapped_rpc(
transport: str = "grpc_asyncio",
@@ -2081,7 +1982,7 @@ async def test_create_index_async_use_cached_wrapped_rpc(
# instead of constructing them on each call
with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
client = DatastoreAdminAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -2125,7 +2026,7 @@ async def test_create_index_async(
transport: str = "grpc_asyncio", request_type=datastore_admin.CreateIndexRequest
):
client = DatastoreAdminAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -2188,7 +2089,7 @@ def test_create_index_field_headers():
@pytest.mark.asyncio
async def test_create_index_field_headers_async():
client = DatastoreAdminAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
@@ -2250,25 +2151,6 @@ def test_delete_index(request_type, transport: str = "grpc"):
assert isinstance(response, future.Future)
-def test_delete_index_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
- client = DatastoreAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc",
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.delete_index), "__call__") as call:
- call.return_value.name = (
- "foo" # operation_request.operation in compute client(s) expect a string.
- )
- client.delete_index()
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == datastore_admin.DeleteIndexRequest()
-
-
def test_delete_index_non_empty_request_with_auto_populated_field():
# This test is a coverage failsafe to make sure that UUID4 fields are
# automatically populated, according to AIP-4235, with non-empty requests.
@@ -2339,27 +2221,6 @@ def test_delete_index_use_cached_wrapped_rpc():
assert mock_rpc.call_count == 2
-@pytest.mark.asyncio
-async def test_delete_index_empty_call_async():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
- client = DatastoreAdminAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc_asyncio",
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.delete_index), "__call__") as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- operations_pb2.Operation(name="operations/spam")
- )
- response = await client.delete_index()
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == datastore_admin.DeleteIndexRequest()
-
-
@pytest.mark.asyncio
async def test_delete_index_async_use_cached_wrapped_rpc(
transport: str = "grpc_asyncio",
@@ -2368,7 +2229,7 @@ async def test_delete_index_async_use_cached_wrapped_rpc(
# instead of constructing them on each call
with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
client = DatastoreAdminAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -2412,7 +2273,7 @@ async def test_delete_index_async(
transport: str = "grpc_asyncio", request_type=datastore_admin.DeleteIndexRequest
):
client = DatastoreAdminAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -2476,7 +2337,7 @@ def test_delete_index_field_headers():
@pytest.mark.asyncio
async def test_delete_index_field_headers_async():
client = DatastoreAdminAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
@@ -2550,25 +2411,6 @@ def test_get_index(request_type, transport: str = "grpc"):
assert response.state == index.Index.State.CREATING
-def test_get_index_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
- client = DatastoreAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc",
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.get_index), "__call__") as call:
- call.return_value.name = (
- "foo" # operation_request.operation in compute client(s) expect a string.
- )
- client.get_index()
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == datastore_admin.GetIndexRequest()
-
-
def test_get_index_non_empty_request_with_auto_populated_field():
# This test is a coverage failsafe to make sure that UUID4 fields are
# automatically populated, according to AIP-4235, with non-empty requests.
@@ -2634,40 +2476,13 @@ def test_get_index_use_cached_wrapped_rpc():
assert mock_rpc.call_count == 2
-@pytest.mark.asyncio
-async def test_get_index_empty_call_async():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
- client = DatastoreAdminAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc_asyncio",
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.get_index), "__call__") as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- index.Index(
- project_id="project_id_value",
- index_id="index_id_value",
- kind="kind_value",
- ancestor=index.Index.AncestorMode.NONE,
- state=index.Index.State.CREATING,
- )
- )
- response = await client.get_index()
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == datastore_admin.GetIndexRequest()
-
-
@pytest.mark.asyncio
async def test_get_index_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
# Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
# instead of constructing them on each call
with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
client = DatastoreAdminAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -2706,7 +2521,7 @@ async def test_get_index_async(
transport: str = "grpc_asyncio", request_type=datastore_admin.GetIndexRequest
):
client = DatastoreAdminAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -2781,7 +2596,7 @@ def test_get_index_field_headers():
@pytest.mark.asyncio
async def test_get_index_field_headers_async():
client = DatastoreAdminAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
@@ -2845,25 +2660,6 @@ def test_list_indexes(request_type, transport: str = "grpc"):
assert response.next_page_token == "next_page_token_value"
-def test_list_indexes_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
- client = DatastoreAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc",
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.list_indexes), "__call__") as call:
- call.return_value.name = (
- "foo" # operation_request.operation in compute client(s) expect a string.
- )
- client.list_indexes()
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == datastore_admin.ListIndexesRequest()
-
-
def test_list_indexes_non_empty_request_with_auto_populated_field():
# This test is a coverage failsafe to make sure that UUID4 fields are
# automatically populated, according to AIP-4235, with non-empty requests.
@@ -2931,29 +2727,6 @@ def test_list_indexes_use_cached_wrapped_rpc():
assert mock_rpc.call_count == 2
-@pytest.mark.asyncio
-async def test_list_indexes_empty_call_async():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
- client = DatastoreAdminAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc_asyncio",
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.list_indexes), "__call__") as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- datastore_admin.ListIndexesResponse(
- next_page_token="next_page_token_value",
- )
- )
- response = await client.list_indexes()
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == datastore_admin.ListIndexesRequest()
-
-
@pytest.mark.asyncio
async def test_list_indexes_async_use_cached_wrapped_rpc(
transport: str = "grpc_asyncio",
@@ -2962,7 +2735,7 @@ async def test_list_indexes_async_use_cached_wrapped_rpc(
# instead of constructing them on each call
with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
client = DatastoreAdminAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -3001,7 +2774,7 @@ async def test_list_indexes_async(
transport: str = "grpc_asyncio", request_type=datastore_admin.ListIndexesRequest
):
client = DatastoreAdminAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -3067,7 +2840,7 @@ def test_list_indexes_field_headers():
@pytest.mark.asyncio
async def test_list_indexes_field_headers_async():
client = DatastoreAdminAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
@@ -3194,7 +2967,7 @@ def test_list_indexes_pages(transport_name: str = "grpc"):
@pytest.mark.asyncio
async def test_list_indexes_async_pager():
client = DatastoreAdminAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -3244,7 +3017,7 @@ async def test_list_indexes_async_pager():
@pytest.mark.asyncio
async def test_list_indexes_async_pages():
client = DatastoreAdminAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -3290,41 +3063,6 @@ async def test_list_indexes_async_pages():
assert page_.raw_page.next_page_token == token
-@pytest.mark.parametrize(
- "request_type",
- [
- datastore_admin.ExportEntitiesRequest,
- dict,
- ],
-)
-def test_export_entities_rest(request_type):
- client = DatastoreAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # send a request that will satisfy transcoding
- request_init = {"project_id": "sample1"}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), "request") as req:
- # Designate an appropriate value for the returned response.
- return_value = operations_pb2.Operation(name="operations/spam")
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- json_return_value = json_format.MessageToJson(return_value)
-
- response_value._content = json_return_value.encode("UTF-8")
- req.return_value = response_value
- response = client.export_entities(request)
-
- # Establish that the response is the type that we expect.
- assert response.operation.name == "operations/spam"
-
-
def test_export_entities_rest_use_cached_wrapped_rpc():
# Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
# instead of constructing them on each call
@@ -3458,89 +3196,6 @@ def test_export_entities_rest_unset_required_fields():
)
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_export_entities_rest_interceptors(null_interceptor):
- transport = transports.DatastoreAdminRestTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- interceptor=None
- if null_interceptor
- else transports.DatastoreAdminRestInterceptor(),
- )
- client = DatastoreAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- operation.Operation, "_set_result_from_operation"
- ), mock.patch.object(
- transports.DatastoreAdminRestInterceptor, "post_export_entities"
- ) as post, mock.patch.object(
- transports.DatastoreAdminRestInterceptor, "pre_export_entities"
- ) as pre:
- pre.assert_not_called()
- post.assert_not_called()
- pb_message = datastore_admin.ExportEntitiesRequest.pb(
- datastore_admin.ExportEntitiesRequest()
- )
- transcode.return_value = {
- "method": "post",
- "uri": "my_uri",
- "body": pb_message,
- "query_params": pb_message,
- }
-
- req.return_value = Response()
- req.return_value.status_code = 200
- req.return_value.request = PreparedRequest()
- req.return_value._content = json_format.MessageToJson(
- operations_pb2.Operation()
- )
-
- request = datastore_admin.ExportEntitiesRequest()
- metadata = [
- ("key", "val"),
- ("cephalopod", "squid"),
- ]
- pre.return_value = request, metadata
- post.return_value = operations_pb2.Operation()
-
- client.export_entities(
- request,
- metadata=[
- ("key", "val"),
- ("cephalopod", "squid"),
- ],
- )
-
- pre.assert_called_once()
- post.assert_called_once()
-
-
-def test_export_entities_rest_bad_request(
- transport: str = "rest", request_type=datastore_admin.ExportEntitiesRequest
-):
- client = DatastoreAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # send a request that will satisfy transcoding
- request_init = {"project_id": "sample1"}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
- ):
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 400
- response_value.request = Request()
- req.return_value = response_value
- client.export_entities(request)
-
-
def test_export_entities_rest_flattened():
client = DatastoreAdminClient(
credentials=ga_credentials.AnonymousCredentials(),
@@ -3600,47 +3255,6 @@ def test_export_entities_rest_flattened_error(transport: str = "rest"):
)
-def test_export_entities_rest_error():
- client = DatastoreAdminClient(
- credentials=ga_credentials.AnonymousCredentials(), transport="rest"
- )
-
-
-@pytest.mark.parametrize(
- "request_type",
- [
- datastore_admin.ImportEntitiesRequest,
- dict,
- ],
-)
-def test_import_entities_rest(request_type):
- client = DatastoreAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # send a request that will satisfy transcoding
- request_init = {"project_id": "sample1"}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), "request") as req:
- # Designate an appropriate value for the returned response.
- return_value = operations_pb2.Operation(name="operations/spam")
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- json_return_value = json_format.MessageToJson(return_value)
-
- response_value._content = json_return_value.encode("UTF-8")
- req.return_value = response_value
- response = client.import_entities(request)
-
- # Establish that the response is the type that we expect.
- assert response.operation.name == "operations/spam"
-
-
def test_import_entities_rest_use_cached_wrapped_rpc():
# Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
# instead of constructing them on each call
@@ -3774,89 +3388,6 @@ def test_import_entities_rest_unset_required_fields():
)
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_import_entities_rest_interceptors(null_interceptor):
- transport = transports.DatastoreAdminRestTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- interceptor=None
- if null_interceptor
- else transports.DatastoreAdminRestInterceptor(),
- )
- client = DatastoreAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- operation.Operation, "_set_result_from_operation"
- ), mock.patch.object(
- transports.DatastoreAdminRestInterceptor, "post_import_entities"
- ) as post, mock.patch.object(
- transports.DatastoreAdminRestInterceptor, "pre_import_entities"
- ) as pre:
- pre.assert_not_called()
- post.assert_not_called()
- pb_message = datastore_admin.ImportEntitiesRequest.pb(
- datastore_admin.ImportEntitiesRequest()
- )
- transcode.return_value = {
- "method": "post",
- "uri": "my_uri",
- "body": pb_message,
- "query_params": pb_message,
- }
-
- req.return_value = Response()
- req.return_value.status_code = 200
- req.return_value.request = PreparedRequest()
- req.return_value._content = json_format.MessageToJson(
- operations_pb2.Operation()
- )
-
- request = datastore_admin.ImportEntitiesRequest()
- metadata = [
- ("key", "val"),
- ("cephalopod", "squid"),
- ]
- pre.return_value = request, metadata
- post.return_value = operations_pb2.Operation()
-
- client.import_entities(
- request,
- metadata=[
- ("key", "val"),
- ("cephalopod", "squid"),
- ],
- )
-
- pre.assert_called_once()
- post.assert_called_once()
-
-
-def test_import_entities_rest_bad_request(
- transport: str = "rest", request_type=datastore_admin.ImportEntitiesRequest
-):
- client = DatastoreAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # send a request that will satisfy transcoding
- request_init = {"project_id": "sample1"}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
- ):
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 400
- response_value.request = Request()
- req.return_value = response_value
- client.import_entities(request)
-
-
def test_import_entities_rest_flattened():
client = DatastoreAdminClient(
credentials=ga_credentials.AnonymousCredentials(),
@@ -3916,123 +3447,47 @@ def test_import_entities_rest_flattened_error(transport: str = "rest"):
)
-def test_import_entities_rest_error():
- client = DatastoreAdminClient(
- credentials=ga_credentials.AnonymousCredentials(), transport="rest"
- )
+def test_create_index_rest_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = DatastoreAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
-@pytest.mark.parametrize(
- "request_type",
- [
- datastore_admin.CreateIndexRequest,
- dict,
- ],
-)
-def test_create_index_rest(request_type):
- client = DatastoreAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
+ # Ensure method has been cached
+ assert client._transport.create_index in client._transport._wrapped_methods
- # send a request that will satisfy transcoding
- request_init = {"project_id": "sample1"}
- request_init["index"] = {
- "project_id": "project_id_value",
- "index_id": "index_id_value",
- "kind": "kind_value",
- "ancestor": 1,
- "properties": [{"name": "name_value", "direction": 1}],
- "state": 1,
- }
- # The version of a generated dependency at test runtime may differ from the version used during generation.
- # Delete any fields which are not present in the current runtime dependency
- # See https://github.com/googleapis/gapic-generator-python/issues/1748
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client._transport._wrapped_methods[client._transport.create_index] = mock_rpc
- # Determine if the message type is proto-plus or protobuf
- test_field = datastore_admin.CreateIndexRequest.meta.fields["index"]
+ request = {}
+ client.create_index(request)
- def get_message_fields(field):
- # Given a field which is a message (composite type), return a list with
- # all the fields of the message.
- # If the field is not a composite type, return an empty list.
- message_fields = []
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
- if hasattr(field, "message") and field.message:
- is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR")
+ # Operation methods build a cached wrapper on first rpc call
+ # subsequent calls should use the cached wrapper
+ wrapper_fn.reset_mock()
- if is_field_type_proto_plus_type:
- message_fields = field.message.meta.fields.values()
- # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types
- else: # pragma: NO COVER
- message_fields = field.message.DESCRIPTOR.fields
- return message_fields
-
- runtime_nested_fields = [
- (field.name, nested_field.name)
- for field in get_message_fields(test_field)
- for nested_field in get_message_fields(field)
- ]
-
- subfields_not_in_runtime = []
-
- # For each item in the sample request, create a list of sub fields which are not present at runtime
- # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime
- for field, value in request_init["index"].items(): # pragma: NO COVER
- result = None
- is_repeated = False
- # For repeated fields
- if isinstance(value, list) and len(value):
- is_repeated = True
- result = value[0]
- # For fields where the type is another message
- if isinstance(value, dict):
- result = value
-
- if result and hasattr(result, "keys"):
- for subfield in result.keys():
- if (field, subfield) not in runtime_nested_fields:
- subfields_not_in_runtime.append(
- {
- "field": field,
- "subfield": subfield,
- "is_repeated": is_repeated,
- }
- )
-
- # Remove fields from the sample request which are not present in the runtime version of the dependency
- # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime
- for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER
- field = subfield_to_delete.get("field")
- field_repeated = subfield_to_delete.get("is_repeated")
- subfield = subfield_to_delete.get("subfield")
- if subfield:
- if field_repeated:
- for i in range(0, len(request_init["index"][field])):
- del request_init["index"][field][i][subfield]
- else:
- del request_init["index"][field][subfield]
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), "request") as req:
- # Designate an appropriate value for the returned response.
- return_value = operations_pb2.Operation(name="operations/spam")
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- json_return_value = json_format.MessageToJson(return_value)
-
- response_value._content = json_return_value.encode("UTF-8")
- req.return_value = response_value
- response = client.create_index(request)
+ client.create_index(request)
- # Establish that the response is the type that we expect.
- assert response.operation.name == "operations/spam"
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
-def test_create_index_rest_use_cached_wrapped_rpc():
+def test_delete_index_rest_use_cached_wrapped_rpc():
# Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
# instead of constructing them on each call
with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
@@ -4046,17 +3501,17 @@ def test_create_index_rest_use_cached_wrapped_rpc():
wrapper_fn.reset_mock()
# Ensure method has been cached
- assert client._transport.create_index in client._transport._wrapped_methods
+ assert client._transport.delete_index in client._transport._wrapped_methods
# Replace cached wrapped function with mock
mock_rpc = mock.Mock()
mock_rpc.return_value.name = (
"foo" # operation_request.operation in compute client(s) expect a string.
)
- client._transport._wrapped_methods[client._transport.create_index] = mock_rpc
+ client._transport._wrapped_methods[client._transport.delete_index] = mock_rpc
request = {}
- client.create_index(request)
+ client.delete_index(request)
# Establish that the underlying gRPC stub method was called.
assert mock_rpc.call_count == 1
@@ -4065,138 +3520,50 @@ def test_create_index_rest_use_cached_wrapped_rpc():
# subsequent calls should use the cached wrapper
wrapper_fn.reset_mock()
- client.create_index(request)
+ client.delete_index(request)
# Establish that a new wrapper was not created for this call
assert wrapper_fn.call_count == 0
assert mock_rpc.call_count == 2
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_create_index_rest_interceptors(null_interceptor):
- transport = transports.DatastoreAdminRestTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- interceptor=None
- if null_interceptor
- else transports.DatastoreAdminRestInterceptor(),
- )
- client = DatastoreAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- operation.Operation, "_set_result_from_operation"
- ), mock.patch.object(
- transports.DatastoreAdminRestInterceptor, "post_create_index"
- ) as post, mock.patch.object(
- transports.DatastoreAdminRestInterceptor, "pre_create_index"
- ) as pre:
- pre.assert_not_called()
- post.assert_not_called()
- pb_message = datastore_admin.CreateIndexRequest.pb(
- datastore_admin.CreateIndexRequest()
+def test_get_index_rest_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = DatastoreAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
)
- transcode.return_value = {
- "method": "post",
- "uri": "my_uri",
- "body": pb_message,
- "query_params": pb_message,
- }
- req.return_value = Response()
- req.return_value.status_code = 200
- req.return_value.request = PreparedRequest()
- req.return_value._content = json_format.MessageToJson(
- operations_pb2.Operation()
- )
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
- request = datastore_admin.CreateIndexRequest()
- metadata = [
- ("key", "val"),
- ("cephalopod", "squid"),
- ]
- pre.return_value = request, metadata
- post.return_value = operations_pb2.Operation()
+ # Ensure method has been cached
+ assert client._transport.get_index in client._transport._wrapped_methods
- client.create_index(
- request,
- metadata=[
- ("key", "val"),
- ("cephalopod", "squid"),
- ],
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
)
+ client._transport._wrapped_methods[client._transport.get_index] = mock_rpc
- pre.assert_called_once()
- post.assert_called_once()
-
-
-def test_create_index_rest_bad_request(
- transport: str = "rest", request_type=datastore_admin.CreateIndexRequest
-):
- client = DatastoreAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # send a request that will satisfy transcoding
- request_init = {"project_id": "sample1"}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
- ):
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 400
- response_value.request = Request()
- req.return_value = response_value
- client.create_index(request)
-
-
-def test_create_index_rest_error():
- client = DatastoreAdminClient(
- credentials=ga_credentials.AnonymousCredentials(), transport="rest"
- )
-
-
-@pytest.mark.parametrize(
- "request_type",
- [
- datastore_admin.DeleteIndexRequest,
- dict,
- ],
-)
-def test_delete_index_rest(request_type):
- client = DatastoreAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # send a request that will satisfy transcoding
- request_init = {"project_id": "sample1", "index_id": "sample2"}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), "request") as req:
- # Designate an appropriate value for the returned response.
- return_value = operations_pb2.Operation(name="operations/spam")
+ request = {}
+ client.get_index(request)
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- json_return_value = json_format.MessageToJson(return_value)
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
- response_value._content = json_return_value.encode("UTF-8")
- req.return_value = response_value
- response = client.delete_index(request)
+ client.get_index(request)
- # Establish that the response is the type that we expect.
- assert response.operation.name == "operations/spam"
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
-def test_delete_index_rest_use_cached_wrapped_rpc():
+def test_list_indexes_rest_use_cached_wrapped_rpc():
# Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
# instead of constructing them on each call
with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
@@ -4210,70 +3577,1012 @@ def test_delete_index_rest_use_cached_wrapped_rpc():
wrapper_fn.reset_mock()
# Ensure method has been cached
- assert client._transport.delete_index in client._transport._wrapped_methods
+ assert client._transport.list_indexes in client._transport._wrapped_methods
# Replace cached wrapped function with mock
mock_rpc = mock.Mock()
mock_rpc.return_value.name = (
"foo" # operation_request.operation in compute client(s) expect a string.
)
- client._transport._wrapped_methods[client._transport.delete_index] = mock_rpc
+ client._transport._wrapped_methods[client._transport.list_indexes] = mock_rpc
request = {}
- client.delete_index(request)
+ client.list_indexes(request)
# Establish that the underlying gRPC stub method was called.
assert mock_rpc.call_count == 1
- # Operation methods build a cached wrapper on first rpc call
- # subsequent calls should use the cached wrapper
- wrapper_fn.reset_mock()
-
- client.delete_index(request)
+ client.list_indexes(request)
# Establish that a new wrapper was not created for this call
assert wrapper_fn.call_count == 0
assert mock_rpc.call_count == 2
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_delete_index_rest_interceptors(null_interceptor):
- transport = transports.DatastoreAdminRestTransport(
+def test_list_indexes_rest_pager(transport: str = "rest"):
+ client = DatastoreAdminClient(
credentials=ga_credentials.AnonymousCredentials(),
- interceptor=None
- if null_interceptor
- else transports.DatastoreAdminRestInterceptor(),
+ transport=transport,
)
- client = DatastoreAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- operation.Operation, "_set_result_from_operation"
- ), mock.patch.object(
- transports.DatastoreAdminRestInterceptor, "post_delete_index"
- ) as post, mock.patch.object(
- transports.DatastoreAdminRestInterceptor, "pre_delete_index"
- ) as pre:
- pre.assert_not_called()
- post.assert_not_called()
- pb_message = datastore_admin.DeleteIndexRequest.pb(
- datastore_admin.DeleteIndexRequest()
- )
- transcode.return_value = {
- "method": "post",
- "uri": "my_uri",
- "body": pb_message,
- "query_params": pb_message,
- }
- req.return_value = Response()
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(Session, "request") as req:
+ # TODO(kbandes): remove this mock unless there's a good reason for it.
+ # with mock.patch.object(path_template, 'transcode') as transcode:
+ # Set the response as a series of pages
+ response = (
+ datastore_admin.ListIndexesResponse(
+ indexes=[
+ index.Index(),
+ index.Index(),
+ index.Index(),
+ ],
+ next_page_token="abc",
+ ),
+ datastore_admin.ListIndexesResponse(
+ indexes=[],
+ next_page_token="def",
+ ),
+ datastore_admin.ListIndexesResponse(
+ indexes=[
+ index.Index(),
+ ],
+ next_page_token="ghi",
+ ),
+ datastore_admin.ListIndexesResponse(
+ indexes=[
+ index.Index(),
+ index.Index(),
+ ],
+ ),
+ )
+ # Two responses for two calls
+ response = response + response
+
+ # Wrap the values into proper Response objs
+ response = tuple(
+ datastore_admin.ListIndexesResponse.to_json(x) for x in response
+ )
+ return_values = tuple(Response() for i in response)
+ for return_val, response_val in zip(return_values, response):
+ return_val._content = response_val.encode("UTF-8")
+ return_val.status_code = 200
+ req.side_effect = return_values
+
+ sample_request = {"project_id": "sample1"}
+
+ pager = client.list_indexes(request=sample_request)
+
+ results = list(pager)
+ assert len(results) == 6
+ assert all(isinstance(i, index.Index) for i in results)
+
+ pages = list(client.list_indexes(request=sample_request).pages)
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.raw_page.next_page_token == token
+
+
+def test_credentials_transport_error():
+ # It is an error to provide credentials and a transport instance.
+ transport = transports.DatastoreAdminGrpcTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ with pytest.raises(ValueError):
+ client = DatastoreAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # It is an error to provide a credentials file and a transport instance.
+ transport = transports.DatastoreAdminGrpcTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ with pytest.raises(ValueError):
+ client = DatastoreAdminClient(
+ client_options={"credentials_file": "credentials.json"},
+ transport=transport,
+ )
+
+ # It is an error to provide an api_key and a transport instance.
+ transport = transports.DatastoreAdminGrpcTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ options = client_options.ClientOptions()
+ options.api_key = "api_key"
+ with pytest.raises(ValueError):
+ client = DatastoreAdminClient(
+ client_options=options,
+ transport=transport,
+ )
+
+ # It is an error to provide an api_key and a credential.
+ options = client_options.ClientOptions()
+ options.api_key = "api_key"
+ with pytest.raises(ValueError):
+ client = DatastoreAdminClient(
+ client_options=options, credentials=ga_credentials.AnonymousCredentials()
+ )
+
+ # It is an error to provide scopes and a transport instance.
+ transport = transports.DatastoreAdminGrpcTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ with pytest.raises(ValueError):
+ client = DatastoreAdminClient(
+ client_options={"scopes": ["1", "2"]},
+ transport=transport,
+ )
+
+
+def test_transport_instance():
+ # A client may be instantiated with a custom transport instance.
+ transport = transports.DatastoreAdminGrpcTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ client = DatastoreAdminClient(transport=transport)
+ assert client.transport is transport
+
+
+def test_transport_get_channel():
+ # A client may be instantiated with a custom transport instance.
+ transport = transports.DatastoreAdminGrpcTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ channel = transport.grpc_channel
+ assert channel
+
+ transport = transports.DatastoreAdminGrpcAsyncIOTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ channel = transport.grpc_channel
+ assert channel
+
+
+@pytest.mark.parametrize(
+ "transport_class",
+ [
+ transports.DatastoreAdminGrpcTransport,
+ transports.DatastoreAdminGrpcAsyncIOTransport,
+ transports.DatastoreAdminRestTransport,
+ ],
+)
+def test_transport_adc(transport_class):
+ # Test default credentials are used if not provided.
+ with mock.patch.object(google.auth, "default") as adc:
+ adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+ transport_class()
+ adc.assert_called_once()
+
+
+def test_transport_kind_grpc():
+ transport = DatastoreAdminClient.get_transport_class("grpc")(
+ credentials=ga_credentials.AnonymousCredentials()
+ )
+ assert transport.kind == "grpc"
+
+
+def test_initialize_client_w_grpc():
+ client = DatastoreAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="grpc"
+ )
+ assert client is not None
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_export_entities_empty_call_grpc():
+ client = DatastoreAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.export_entities), "__call__") as call:
+ call.return_value = operations_pb2.Operation(name="operations/op")
+ client.export_entities(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = datastore_admin.ExportEntitiesRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_import_entities_empty_call_grpc():
+ client = DatastoreAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.import_entities), "__call__") as call:
+ call.return_value = operations_pb2.Operation(name="operations/op")
+ client.import_entities(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = datastore_admin.ImportEntitiesRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_create_index_empty_call_grpc():
+ client = DatastoreAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.create_index), "__call__") as call:
+ call.return_value = operations_pb2.Operation(name="operations/op")
+ client.create_index(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = datastore_admin.CreateIndexRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_delete_index_empty_call_grpc():
+ client = DatastoreAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.delete_index), "__call__") as call:
+ call.return_value = operations_pb2.Operation(name="operations/op")
+ client.delete_index(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = datastore_admin.DeleteIndexRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_get_index_empty_call_grpc():
+ client = DatastoreAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.get_index), "__call__") as call:
+ call.return_value = index.Index()
+ client.get_index(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = datastore_admin.GetIndexRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_list_indexes_empty_call_grpc():
+ client = DatastoreAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.list_indexes), "__call__") as call:
+ call.return_value = datastore_admin.ListIndexesResponse()
+ client.list_indexes(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = datastore_admin.ListIndexesRequest()
+
+ assert args[0] == request_msg
+
+
+def test_transport_kind_grpc_asyncio():
+ transport = DatastoreAdminAsyncClient.get_transport_class("grpc_asyncio")(
+ credentials=async_anonymous_credentials()
+ )
+ assert transport.kind == "grpc_asyncio"
+
+
+def test_initialize_client_w_grpc_asyncio():
+ client = DatastoreAdminAsyncClient(
+ credentials=async_anonymous_credentials(), transport="grpc_asyncio"
+ )
+ assert client is not None
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_export_entities_empty_call_grpc_asyncio():
+ client = DatastoreAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.export_entities), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name="operations/spam")
+ )
+ await client.export_entities(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = datastore_admin.ExportEntitiesRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_import_entities_empty_call_grpc_asyncio():
+ client = DatastoreAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.import_entities), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name="operations/spam")
+ )
+ await client.import_entities(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = datastore_admin.ImportEntitiesRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_create_index_empty_call_grpc_asyncio():
+ client = DatastoreAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.create_index), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name="operations/spam")
+ )
+ await client.create_index(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = datastore_admin.CreateIndexRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_delete_index_empty_call_grpc_asyncio():
+ client = DatastoreAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.delete_index), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name="operations/spam")
+ )
+ await client.delete_index(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = datastore_admin.DeleteIndexRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_get_index_empty_call_grpc_asyncio():
+ client = DatastoreAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.get_index), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ index.Index(
+ project_id="project_id_value",
+ index_id="index_id_value",
+ kind="kind_value",
+ ancestor=index.Index.AncestorMode.NONE,
+ state=index.Index.State.CREATING,
+ )
+ )
+ await client.get_index(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = datastore_admin.GetIndexRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_list_indexes_empty_call_grpc_asyncio():
+ client = DatastoreAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.list_indexes), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ datastore_admin.ListIndexesResponse(
+ next_page_token="next_page_token_value",
+ )
+ )
+ await client.list_indexes(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = datastore_admin.ListIndexesRequest()
+
+ assert args[0] == request_msg
+
+
+def test_transport_kind_rest():
+ transport = DatastoreAdminClient.get_transport_class("rest")(
+ credentials=ga_credentials.AnonymousCredentials()
+ )
+ assert transport.kind == "rest"
+
+
+def test_export_entities_rest_bad_request(
+ request_type=datastore_admin.ExportEntitiesRequest,
+):
+ client = DatastoreAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+ )
+ # send a request that will satisfy transcoding
+ request_init = {"project_id": "sample1"}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises(
+ core_exceptions.BadRequest
+ ):
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ json_return_value = ""
+ response_value.json = mock.Mock(return_value={})
+ response_value.status_code = 400
+ response_value.request = mock.Mock()
+ req.return_value = response_value
+ client.export_entities(request)
+
+
+@pytest.mark.parametrize(
+ "request_type",
+ [
+ datastore_admin.ExportEntitiesRequest,
+ dict,
+ ],
+)
+def test_export_entities_rest_call_success(request_type):
+ client = DatastoreAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {"project_id": "sample1"}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), "request") as req:
+ # Designate an appropriate value for the returned response.
+ return_value = operations_pb2.Operation(name="operations/spam")
+
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ response_value.status_code = 200
+ json_return_value = json_format.MessageToJson(return_value)
+ response_value.content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+ response = client.export_entities(request)
+
+ # Establish that the response is the type that we expect.
+ json_return_value = json_format.MessageToJson(return_value)
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_export_entities_rest_interceptors(null_interceptor):
+ transport = transports.DatastoreAdminRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None
+ if null_interceptor
+ else transports.DatastoreAdminRestInterceptor(),
+ )
+ client = DatastoreAdminClient(transport=transport)
+
+ with mock.patch.object(
+ type(client.transport._session), "request"
+ ) as req, mock.patch.object(
+ path_template, "transcode"
+ ) as transcode, mock.patch.object(
+ operation.Operation, "_set_result_from_operation"
+ ), mock.patch.object(
+ transports.DatastoreAdminRestInterceptor, "post_export_entities"
+ ) as post, mock.patch.object(
+ transports.DatastoreAdminRestInterceptor, "pre_export_entities"
+ ) as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ pb_message = datastore_admin.ExportEntitiesRequest.pb(
+ datastore_admin.ExportEntitiesRequest()
+ )
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = mock.Mock()
+ req.return_value.status_code = 200
+ return_value = json_format.MessageToJson(operations_pb2.Operation())
+ req.return_value.content = return_value
+
+ request = datastore_admin.ExportEntitiesRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = operations_pb2.Operation()
+
+ client.export_entities(
+ request,
+ metadata=[
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ],
+ )
+
+ pre.assert_called_once()
+ post.assert_called_once()
+
+
+def test_import_entities_rest_bad_request(
+ request_type=datastore_admin.ImportEntitiesRequest,
+):
+ client = DatastoreAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+ )
+ # send a request that will satisfy transcoding
+ request_init = {"project_id": "sample1"}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises(
+ core_exceptions.BadRequest
+ ):
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ json_return_value = ""
+ response_value.json = mock.Mock(return_value={})
+ response_value.status_code = 400
+ response_value.request = mock.Mock()
+ req.return_value = response_value
+ client.import_entities(request)
+
+
+@pytest.mark.parametrize(
+ "request_type",
+ [
+ datastore_admin.ImportEntitiesRequest,
+ dict,
+ ],
+)
+def test_import_entities_rest_call_success(request_type):
+ client = DatastoreAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {"project_id": "sample1"}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), "request") as req:
+ # Designate an appropriate value for the returned response.
+ return_value = operations_pb2.Operation(name="operations/spam")
+
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ response_value.status_code = 200
+ json_return_value = json_format.MessageToJson(return_value)
+ response_value.content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+ response = client.import_entities(request)
+
+ # Establish that the response is the type that we expect.
+ json_return_value = json_format.MessageToJson(return_value)
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_import_entities_rest_interceptors(null_interceptor):
+ transport = transports.DatastoreAdminRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None
+ if null_interceptor
+ else transports.DatastoreAdminRestInterceptor(),
+ )
+ client = DatastoreAdminClient(transport=transport)
+
+ with mock.patch.object(
+ type(client.transport._session), "request"
+ ) as req, mock.patch.object(
+ path_template, "transcode"
+ ) as transcode, mock.patch.object(
+ operation.Operation, "_set_result_from_operation"
+ ), mock.patch.object(
+ transports.DatastoreAdminRestInterceptor, "post_import_entities"
+ ) as post, mock.patch.object(
+ transports.DatastoreAdminRestInterceptor, "pre_import_entities"
+ ) as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ pb_message = datastore_admin.ImportEntitiesRequest.pb(
+ datastore_admin.ImportEntitiesRequest()
+ )
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = mock.Mock()
+ req.return_value.status_code = 200
+ return_value = json_format.MessageToJson(operations_pb2.Operation())
+ req.return_value.content = return_value
+
+ request = datastore_admin.ImportEntitiesRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = operations_pb2.Operation()
+
+ client.import_entities(
+ request,
+ metadata=[
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ],
+ )
+
+ pre.assert_called_once()
+ post.assert_called_once()
+
+
+def test_create_index_rest_bad_request(request_type=datastore_admin.CreateIndexRequest):
+ client = DatastoreAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+ )
+ # send a request that will satisfy transcoding
+ request_init = {"project_id": "sample1"}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises(
+ core_exceptions.BadRequest
+ ):
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ json_return_value = ""
+ response_value.json = mock.Mock(return_value={})
+ response_value.status_code = 400
+ response_value.request = mock.Mock()
+ req.return_value = response_value
+ client.create_index(request)
+
+
+@pytest.mark.parametrize(
+ "request_type",
+ [
+ datastore_admin.CreateIndexRequest,
+ dict,
+ ],
+)
+def test_create_index_rest_call_success(request_type):
+ client = DatastoreAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {"project_id": "sample1"}
+ request_init["index"] = {
+ "project_id": "project_id_value",
+ "index_id": "index_id_value",
+ "kind": "kind_value",
+ "ancestor": 1,
+ "properties": [{"name": "name_value", "direction": 1}],
+ "state": 1,
+ }
+ # The version of a generated dependency at test runtime may differ from the version used during generation.
+ # Delete any fields which are not present in the current runtime dependency
+ # See https://github.com/googleapis/gapic-generator-python/issues/1748
+
+ # Determine if the message type is proto-plus or protobuf
+ test_field = datastore_admin.CreateIndexRequest.meta.fields["index"]
+
+ def get_message_fields(field):
+ # Given a field which is a message (composite type), return a list with
+ # all the fields of the message.
+ # If the field is not a composite type, return an empty list.
+ message_fields = []
+
+ if hasattr(field, "message") and field.message:
+ is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR")
+
+ if is_field_type_proto_plus_type:
+ message_fields = field.message.meta.fields.values()
+ # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types
+ else: # pragma: NO COVER
+ message_fields = field.message.DESCRIPTOR.fields
+ return message_fields
+
+ runtime_nested_fields = [
+ (field.name, nested_field.name)
+ for field in get_message_fields(test_field)
+ for nested_field in get_message_fields(field)
+ ]
+
+ subfields_not_in_runtime = []
+
+ # For each item in the sample request, create a list of sub fields which are not present at runtime
+ # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime
+ for field, value in request_init["index"].items(): # pragma: NO COVER
+ result = None
+ is_repeated = False
+ # For repeated fields
+ if isinstance(value, list) and len(value):
+ is_repeated = True
+ result = value[0]
+ # For fields where the type is another message
+ if isinstance(value, dict):
+ result = value
+
+ if result and hasattr(result, "keys"):
+ for subfield in result.keys():
+ if (field, subfield) not in runtime_nested_fields:
+ subfields_not_in_runtime.append(
+ {
+ "field": field,
+ "subfield": subfield,
+ "is_repeated": is_repeated,
+ }
+ )
+
+ # Remove fields from the sample request which are not present in the runtime version of the dependency
+ # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime
+ for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER
+ field = subfield_to_delete.get("field")
+ field_repeated = subfield_to_delete.get("is_repeated")
+ subfield = subfield_to_delete.get("subfield")
+ if subfield:
+ if field_repeated:
+ for i in range(0, len(request_init["index"][field])):
+ del request_init["index"][field][i][subfield]
+ else:
+ del request_init["index"][field][subfield]
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), "request") as req:
+ # Designate an appropriate value for the returned response.
+ return_value = operations_pb2.Operation(name="operations/spam")
+
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ response_value.status_code = 200
+ json_return_value = json_format.MessageToJson(return_value)
+ response_value.content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+ response = client.create_index(request)
+
+ # Establish that the response is the type that we expect.
+ json_return_value = json_format.MessageToJson(return_value)
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_create_index_rest_interceptors(null_interceptor):
+ transport = transports.DatastoreAdminRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None
+ if null_interceptor
+ else transports.DatastoreAdminRestInterceptor(),
+ )
+ client = DatastoreAdminClient(transport=transport)
+
+ with mock.patch.object(
+ type(client.transport._session), "request"
+ ) as req, mock.patch.object(
+ path_template, "transcode"
+ ) as transcode, mock.patch.object(
+ operation.Operation, "_set_result_from_operation"
+ ), mock.patch.object(
+ transports.DatastoreAdminRestInterceptor, "post_create_index"
+ ) as post, mock.patch.object(
+ transports.DatastoreAdminRestInterceptor, "pre_create_index"
+ ) as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ pb_message = datastore_admin.CreateIndexRequest.pb(
+ datastore_admin.CreateIndexRequest()
+ )
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = mock.Mock()
req.return_value.status_code = 200
- req.return_value.request = PreparedRequest()
- req.return_value._content = json_format.MessageToJson(
- operations_pb2.Operation()
+ return_value = json_format.MessageToJson(operations_pb2.Operation())
+ req.return_value.content = return_value
+
+ request = datastore_admin.CreateIndexRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = operations_pb2.Operation()
+
+ client.create_index(
+ request,
+ metadata=[
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ],
+ )
+
+ pre.assert_called_once()
+ post.assert_called_once()
+
+
+def test_delete_index_rest_bad_request(request_type=datastore_admin.DeleteIndexRequest):
+ client = DatastoreAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+ )
+ # send a request that will satisfy transcoding
+ request_init = {"project_id": "sample1", "index_id": "sample2"}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises(
+ core_exceptions.BadRequest
+ ):
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ json_return_value = ""
+ response_value.json = mock.Mock(return_value={})
+ response_value.status_code = 400
+ response_value.request = mock.Mock()
+ req.return_value = response_value
+ client.delete_index(request)
+
+
+@pytest.mark.parametrize(
+ "request_type",
+ [
+ datastore_admin.DeleteIndexRequest,
+ dict,
+ ],
+)
+def test_delete_index_rest_call_success(request_type):
+ client = DatastoreAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {"project_id": "sample1", "index_id": "sample2"}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), "request") as req:
+ # Designate an appropriate value for the returned response.
+ return_value = operations_pb2.Operation(name="operations/spam")
+
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ response_value.status_code = 200
+ json_return_value = json_format.MessageToJson(return_value)
+ response_value.content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+ response = client.delete_index(request)
+
+ # Establish that the response is the type that we expect.
+ json_return_value = json_format.MessageToJson(return_value)
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_delete_index_rest_interceptors(null_interceptor):
+ transport = transports.DatastoreAdminRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None
+ if null_interceptor
+ else transports.DatastoreAdminRestInterceptor(),
+ )
+ client = DatastoreAdminClient(transport=transport)
+
+ with mock.patch.object(
+ type(client.transport._session), "request"
+ ) as req, mock.patch.object(
+ path_template, "transcode"
+ ) as transcode, mock.patch.object(
+ operation.Operation, "_set_result_from_operation"
+ ), mock.patch.object(
+ transports.DatastoreAdminRestInterceptor, "post_delete_index"
+ ) as post, mock.patch.object(
+ transports.DatastoreAdminRestInterceptor, "pre_delete_index"
+ ) as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ pb_message = datastore_admin.DeleteIndexRequest.pb(
+ datastore_admin.DeleteIndexRequest()
)
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = mock.Mock()
+ req.return_value.status_code = 200
+ return_value = json_format.MessageToJson(operations_pb2.Operation())
+ req.return_value.content = return_value
request = datastore_admin.DeleteIndexRequest()
metadata = [
@@ -4283,7 +4592,132 @@ def test_delete_index_rest_interceptors(null_interceptor):
pre.return_value = request, metadata
post.return_value = operations_pb2.Operation()
- client.delete_index(
+ client.delete_index(
+ request,
+ metadata=[
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ],
+ )
+
+ pre.assert_called_once()
+ post.assert_called_once()
+
+
+def test_get_index_rest_bad_request(request_type=datastore_admin.GetIndexRequest):
+ client = DatastoreAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+ )
+ # send a request that will satisfy transcoding
+ request_init = {"project_id": "sample1", "index_id": "sample2"}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises(
+ core_exceptions.BadRequest
+ ):
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ json_return_value = ""
+ response_value.json = mock.Mock(return_value={})
+ response_value.status_code = 400
+ response_value.request = mock.Mock()
+ req.return_value = response_value
+ client.get_index(request)
+
+
+@pytest.mark.parametrize(
+ "request_type",
+ [
+ datastore_admin.GetIndexRequest,
+ dict,
+ ],
+)
+def test_get_index_rest_call_success(request_type):
+ client = DatastoreAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {"project_id": "sample1", "index_id": "sample2"}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), "request") as req:
+ # Designate an appropriate value for the returned response.
+ return_value = index.Index(
+ project_id="project_id_value",
+ index_id="index_id_value",
+ kind="kind_value",
+ ancestor=index.Index.AncestorMode.NONE,
+ state=index.Index.State.CREATING,
+ )
+
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ response_value.status_code = 200
+
+ # Convert return value to protobuf type
+ return_value = index.Index.pb(return_value)
+ json_return_value = json_format.MessageToJson(return_value)
+ response_value.content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+ response = client.get_index(request)
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, index.Index)
+ assert response.project_id == "project_id_value"
+ assert response.index_id == "index_id_value"
+ assert response.kind == "kind_value"
+ assert response.ancestor == index.Index.AncestorMode.NONE
+ assert response.state == index.Index.State.CREATING
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_get_index_rest_interceptors(null_interceptor):
+ transport = transports.DatastoreAdminRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None
+ if null_interceptor
+ else transports.DatastoreAdminRestInterceptor(),
+ )
+ client = DatastoreAdminClient(transport=transport)
+
+ with mock.patch.object(
+ type(client.transport._session), "request"
+ ) as req, mock.patch.object(
+ path_template, "transcode"
+ ) as transcode, mock.patch.object(
+ transports.DatastoreAdminRestInterceptor, "post_get_index"
+ ) as post, mock.patch.object(
+ transports.DatastoreAdminRestInterceptor, "pre_get_index"
+ ) as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ pb_message = datastore_admin.GetIndexRequest.pb(
+ datastore_admin.GetIndexRequest()
+ )
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = mock.Mock()
+ req.return_value.status_code = 200
+ return_value = index.Index.to_json(index.Index())
+ req.return_value.content = return_value
+
+ request = datastore_admin.GetIndexRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = index.Index()
+
+ client.get_index(
request,
metadata=[
("key", "val"),
@@ -4295,16 +4729,12 @@ def test_delete_index_rest_interceptors(null_interceptor):
post.assert_called_once()
-def test_delete_index_rest_bad_request(
- transport: str = "rest", request_type=datastore_admin.DeleteIndexRequest
-):
+def test_list_indexes_rest_bad_request(request_type=datastore_admin.ListIndexesRequest):
client = DatastoreAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport="rest"
)
-
# send a request that will satisfy transcoding
- request_init = {"project_id": "sample1", "index_id": "sample2"}
+ request_init = {"project_id": "sample1"}
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
@@ -4312,105 +4742,56 @@ def test_delete_index_rest_bad_request(
core_exceptions.BadRequest
):
# Wrap the value into a proper Response obj
- response_value = Response()
+ response_value = mock.Mock()
+ json_return_value = ""
+ response_value.json = mock.Mock(return_value={})
response_value.status_code = 400
- response_value.request = Request()
+ response_value.request = mock.Mock()
req.return_value = response_value
- client.delete_index(request)
-
-
-def test_delete_index_rest_error():
- client = DatastoreAdminClient(
- credentials=ga_credentials.AnonymousCredentials(), transport="rest"
- )
+ client.list_indexes(request)
@pytest.mark.parametrize(
"request_type",
[
- datastore_admin.GetIndexRequest,
+ datastore_admin.ListIndexesRequest,
dict,
],
)
-def test_get_index_rest(request_type):
+def test_list_indexes_rest_call_success(request_type):
client = DatastoreAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
+ credentials=ga_credentials.AnonymousCredentials(), transport="rest"
)
# send a request that will satisfy transcoding
- request_init = {"project_id": "sample1", "index_id": "sample2"}
+ request_init = {"project_id": "sample1"}
request = request_type(**request_init)
# Mock the http request call within the method and fake a response.
with mock.patch.object(type(client.transport._session), "request") as req:
# Designate an appropriate value for the returned response.
- return_value = index.Index(
- project_id="project_id_value",
- index_id="index_id_value",
- kind="kind_value",
- ancestor=index.Index.AncestorMode.NONE,
- state=index.Index.State.CREATING,
+ return_value = datastore_admin.ListIndexesResponse(
+ next_page_token="next_page_token_value",
)
# Wrap the value into a proper Response obj
- response_value = Response()
+ response_value = mock.Mock()
response_value.status_code = 200
+
# Convert return value to protobuf type
- return_value = index.Index.pb(return_value)
+ return_value = datastore_admin.ListIndexesResponse.pb(return_value)
json_return_value = json_format.MessageToJson(return_value)
-
- response_value._content = json_return_value.encode("UTF-8")
+ response_value.content = json_return_value.encode("UTF-8")
req.return_value = response_value
- response = client.get_index(request)
+ response = client.list_indexes(request)
# Establish that the response is the type that we expect.
- assert isinstance(response, index.Index)
- assert response.project_id == "project_id_value"
- assert response.index_id == "index_id_value"
- assert response.kind == "kind_value"
- assert response.ancestor == index.Index.AncestorMode.NONE
- assert response.state == index.Index.State.CREATING
-
-
-def test_get_index_rest_use_cached_wrapped_rpc():
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
- client = DatastoreAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._transport.get_index in client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.Mock()
- mock_rpc.return_value.name = (
- "foo" # operation_request.operation in compute client(s) expect a string.
- )
- client._transport._wrapped_methods[client._transport.get_index] = mock_rpc
-
- request = {}
- client.get_index(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- client.get_index(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
+ assert isinstance(response, pagers.ListIndexesPager)
+ assert response.next_page_token == "next_page_token_value"
@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_get_index_rest_interceptors(null_interceptor):
+def test_list_indexes_rest_interceptors(null_interceptor):
transport = transports.DatastoreAdminRestTransport(
credentials=ga_credentials.AnonymousCredentials(),
interceptor=None
@@ -4418,19 +4799,20 @@ def test_get_index_rest_interceptors(null_interceptor):
else transports.DatastoreAdminRestInterceptor(),
)
client = DatastoreAdminClient(transport=transport)
+
with mock.patch.object(
type(client.transport._session), "request"
) as req, mock.patch.object(
path_template, "transcode"
) as transcode, mock.patch.object(
- transports.DatastoreAdminRestInterceptor, "post_get_index"
+ transports.DatastoreAdminRestInterceptor, "post_list_indexes"
) as post, mock.patch.object(
- transports.DatastoreAdminRestInterceptor, "pre_get_index"
+ transports.DatastoreAdminRestInterceptor, "pre_list_indexes"
) as pre:
pre.assert_not_called()
post.assert_not_called()
- pb_message = datastore_admin.GetIndexRequest.pb(
- datastore_admin.GetIndexRequest()
+ pb_message = datastore_admin.ListIndexesRequest.pb(
+ datastore_admin.ListIndexesRequest()
)
transcode.return_value = {
"method": "post",
@@ -4439,20 +4821,22 @@ def test_get_index_rest_interceptors(null_interceptor):
"query_params": pb_message,
}
- req.return_value = Response()
+ req.return_value = mock.Mock()
req.return_value.status_code = 200
- req.return_value.request = PreparedRequest()
- req.return_value._content = index.Index.to_json(index.Index())
+ return_value = datastore_admin.ListIndexesResponse.to_json(
+ datastore_admin.ListIndexesResponse()
+ )
+ req.return_value.content = return_value
- request = datastore_admin.GetIndexRequest()
+ request = datastore_admin.ListIndexesRequest()
metadata = [
("key", "val"),
("cephalopod", "squid"),
]
pre.return_value = request, metadata
- post.return_value = index.Index()
+ post.return_value = datastore_admin.ListIndexesResponse()
- client.get_index(
+ client.list_indexes(
request,
metadata=[
("key", "val"),
@@ -4464,17 +4848,17 @@ def test_get_index_rest_interceptors(null_interceptor):
post.assert_called_once()
-def test_get_index_rest_bad_request(
- transport: str = "rest", request_type=datastore_admin.GetIndexRequest
+def test_cancel_operation_rest_bad_request(
+ request_type=operations_pb2.CancelOperationRequest,
):
client = DatastoreAdminClient(
credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
+ transport="rest",
+ )
+ request = request_type()
+ request = json_format.ParseDict(
+ {"name": "projects/sample1/operations/sample2"}, request
)
-
- # send a request that will satisfy transcoding
- request_init = {"project_id": "sample1", "index_id": "sample2"}
- request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
with mock.patch.object(Session, "request") as req, pytest.raises(
@@ -4482,162 +4866,177 @@ def test_get_index_rest_bad_request(
):
# Wrap the value into a proper Response obj
response_value = Response()
+ json_return_value = ""
+ response_value.json = mock.Mock(return_value={})
response_value.status_code = 400
response_value.request = Request()
req.return_value = response_value
- client.get_index(request)
-
-
-def test_get_index_rest_error():
- client = DatastoreAdminClient(
- credentials=ga_credentials.AnonymousCredentials(), transport="rest"
- )
+ client.cancel_operation(request)
@pytest.mark.parametrize(
"request_type",
[
- datastore_admin.ListIndexesRequest,
+ operations_pb2.CancelOperationRequest,
dict,
],
)
-def test_list_indexes_rest(request_type):
+def test_cancel_operation_rest(request_type):
client = DatastoreAdminClient(
credentials=ga_credentials.AnonymousCredentials(),
transport="rest",
)
- # send a request that will satisfy transcoding
- request_init = {"project_id": "sample1"}
+ request_init = {"name": "projects/sample1/operations/sample2"}
request = request_type(**request_init)
-
# Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), "request") as req:
+ with mock.patch.object(Session, "request") as req:
# Designate an appropriate value for the returned response.
- return_value = datastore_admin.ListIndexesResponse(
- next_page_token="next_page_token_value",
- )
+ return_value = None
# Wrap the value into a proper Response obj
- response_value = Response()
+ response_value = mock.Mock()
response_value.status_code = 200
- # Convert return value to protobuf type
- return_value = datastore_admin.ListIndexesResponse.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
+ json_return_value = "{}"
+ response_value.content = json_return_value.encode("UTF-8")
- response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
- response = client.list_indexes(request)
+
+ response = client.cancel_operation(request)
# Establish that the response is the type that we expect.
- assert isinstance(response, pagers.ListIndexesPager)
- assert response.next_page_token == "next_page_token_value"
+ assert response is None
-def test_list_indexes_rest_use_cached_wrapped_rpc():
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
- client = DatastoreAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
+def test_delete_operation_rest_bad_request(
+ request_type=operations_pb2.DeleteOperationRequest,
+):
+ client = DatastoreAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+ request = request_type()
+ request = json_format.ParseDict(
+ {"name": "projects/sample1/operations/sample2"}, request
+ )
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises(
+ core_exceptions.BadRequest
+ ):
+ # Wrap the value into a proper Response obj
+ response_value = Response()
+ json_return_value = ""
+ response_value.json = mock.Mock(return_value={})
+ response_value.status_code = 400
+ response_value.request = Request()
+ req.return_value = response_value
+ client.delete_operation(request)
- # Ensure method has been cached
- assert client._transport.list_indexes in client._transport._wrapped_methods
- # Replace cached wrapped function with mock
- mock_rpc = mock.Mock()
- mock_rpc.return_value.name = (
- "foo" # operation_request.operation in compute client(s) expect a string.
- )
- client._transport._wrapped_methods[client._transport.list_indexes] = mock_rpc
+@pytest.mark.parametrize(
+ "request_type",
+ [
+ operations_pb2.DeleteOperationRequest,
+ dict,
+ ],
+)
+def test_delete_operation_rest(request_type):
+ client = DatastoreAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
- request = {}
- client.list_indexes(request)
+ request_init = {"name": "projects/sample1/operations/sample2"}
+ request = request_type(**request_init)
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(Session, "request") as req:
+ # Designate an appropriate value for the returned response.
+ return_value = None
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ response_value.status_code = 200
+ json_return_value = "{}"
+ response_value.content = json_return_value.encode("UTF-8")
- client.list_indexes(request)
+ req.return_value = response_value
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
+ response = client.delete_operation(request)
+ # Establish that the response is the type that we expect.
+ assert response is None
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_list_indexes_rest_interceptors(null_interceptor):
- transport = transports.DatastoreAdminRestTransport(
+
+def test_get_operation_rest_bad_request(
+ request_type=operations_pb2.GetOperationRequest,
+):
+ client = DatastoreAdminClient(
credentials=ga_credentials.AnonymousCredentials(),
- interceptor=None
- if null_interceptor
- else transports.DatastoreAdminRestInterceptor(),
+ transport="rest",
+ )
+ request = request_type()
+ request = json_format.ParseDict(
+ {"name": "projects/sample1/operations/sample2"}, request
+ )
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises(
+ core_exceptions.BadRequest
+ ):
+ # Wrap the value into a proper Response obj
+ response_value = Response()
+ json_return_value = ""
+ response_value.json = mock.Mock(return_value={})
+ response_value.status_code = 400
+ response_value.request = Request()
+ req.return_value = response_value
+ client.get_operation(request)
+
+
+@pytest.mark.parametrize(
+ "request_type",
+ [
+ operations_pb2.GetOperationRequest,
+ dict,
+ ],
+)
+def test_get_operation_rest(request_type):
+ client = DatastoreAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
)
- client = DatastoreAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.DatastoreAdminRestInterceptor, "post_list_indexes"
- ) as post, mock.patch.object(
- transports.DatastoreAdminRestInterceptor, "pre_list_indexes"
- ) as pre:
- pre.assert_not_called()
- post.assert_not_called()
- pb_message = datastore_admin.ListIndexesRequest.pb(
- datastore_admin.ListIndexesRequest()
- )
- transcode.return_value = {
- "method": "post",
- "uri": "my_uri",
- "body": pb_message,
- "query_params": pb_message,
- }
- req.return_value = Response()
- req.return_value.status_code = 200
- req.return_value.request = PreparedRequest()
- req.return_value._content = datastore_admin.ListIndexesResponse.to_json(
- datastore_admin.ListIndexesResponse()
- )
+ request_init = {"name": "projects/sample1/operations/sample2"}
+ request = request_type(**request_init)
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(Session, "request") as req:
+ # Designate an appropriate value for the returned response.
+ return_value = operations_pb2.Operation()
- request = datastore_admin.ListIndexesRequest()
- metadata = [
- ("key", "val"),
- ("cephalopod", "squid"),
- ]
- pre.return_value = request, metadata
- post.return_value = datastore_admin.ListIndexesResponse()
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ response_value.status_code = 200
+ json_return_value = json_format.MessageToJson(return_value)
+ response_value.content = json_return_value.encode("UTF-8")
- client.list_indexes(
- request,
- metadata=[
- ("key", "val"),
- ("cephalopod", "squid"),
- ],
- )
+ req.return_value = response_value
- pre.assert_called_once()
- post.assert_called_once()
+ response = client.get_operation(request)
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, operations_pb2.Operation)
-def test_list_indexes_rest_bad_request(
- transport: str = "rest", request_type=datastore_admin.ListIndexesRequest
+def test_list_operations_rest_bad_request(
+ request_type=operations_pb2.ListOperationsRequest,
):
client = DatastoreAdminClient(
credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
+ transport="rest",
)
-
- # send a request that will satisfy transcoding
- request_init = {"project_id": "sample1"}
- request = request_type(**request_init)
+ request = request_type()
+ request = json_format.ParseDict({"name": "projects/sample1"}, request)
# Mock the http request call within the method and fake a BadRequest error.
with mock.patch.object(Session, "request") as req, pytest.raises(
@@ -4645,179 +5044,190 @@ def test_list_indexes_rest_bad_request(
):
# Wrap the value into a proper Response obj
response_value = Response()
+ json_return_value = ""
+ response_value.json = mock.Mock(return_value={})
response_value.status_code = 400
response_value.request = Request()
req.return_value = response_value
- client.list_indexes(request)
+ client.list_operations(request)
-def test_list_indexes_rest_pager(transport: str = "rest"):
+@pytest.mark.parametrize(
+ "request_type",
+ [
+ operations_pb2.ListOperationsRequest,
+ dict,
+ ],
+)
+def test_list_operations_rest(request_type):
client = DatastoreAdminClient(
credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
+ transport="rest",
)
+ request_init = {"name": "projects/sample1"}
+ request = request_type(**request_init)
# Mock the http request call within the method and fake a response.
with mock.patch.object(Session, "request") as req:
- # TODO(kbandes): remove this mock unless there's a good reason for it.
- # with mock.patch.object(path_template, 'transcode') as transcode:
- # Set the response as a series of pages
- response = (
- datastore_admin.ListIndexesResponse(
- indexes=[
- index.Index(),
- index.Index(),
- index.Index(),
- ],
- next_page_token="abc",
- ),
- datastore_admin.ListIndexesResponse(
- indexes=[],
- next_page_token="def",
- ),
- datastore_admin.ListIndexesResponse(
- indexes=[
- index.Index(),
- ],
- next_page_token="ghi",
- ),
- datastore_admin.ListIndexesResponse(
- indexes=[
- index.Index(),
- index.Index(),
- ],
- ),
- )
- # Two responses for two calls
- response = response + response
-
- # Wrap the values into proper Response objs
- response = tuple(
- datastore_admin.ListIndexesResponse.to_json(x) for x in response
- )
- return_values = tuple(Response() for i in response)
- for return_val, response_val in zip(return_values, response):
- return_val._content = response_val.encode("UTF-8")
- return_val.status_code = 200
- req.side_effect = return_values
+ # Designate an appropriate value for the returned response.
+ return_value = operations_pb2.ListOperationsResponse()
- sample_request = {"project_id": "sample1"}
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ response_value.status_code = 200
+ json_return_value = json_format.MessageToJson(return_value)
+ response_value.content = json_return_value.encode("UTF-8")
- pager = client.list_indexes(request=sample_request)
+ req.return_value = response_value
- results = list(pager)
- assert len(results) == 6
- assert all(isinstance(i, index.Index) for i in results)
+ response = client.list_operations(request)
- pages = list(client.list_indexes(request=sample_request).pages)
- for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
- assert page_.raw_page.next_page_token == token
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, operations_pb2.ListOperationsResponse)
-def test_credentials_transport_error():
- # It is an error to provide credentials and a transport instance.
- transport = transports.DatastoreAdminGrpcTransport(
- credentials=ga_credentials.AnonymousCredentials(),
+def test_initialize_client_w_rest():
+ client = DatastoreAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="rest"
)
- with pytest.raises(ValueError):
- client = DatastoreAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
+ assert client is not None
- # It is an error to provide a credentials file and a transport instance.
- transport = transports.DatastoreAdminGrpcTransport(
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_export_entities_empty_call_rest():
+ client = DatastoreAdminClient(
credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
)
- with pytest.raises(ValueError):
- client = DatastoreAdminClient(
- client_options={"credentials_file": "credentials.json"},
- transport=transport,
- )
- # It is an error to provide an api_key and a transport instance.
- transport = transports.DatastoreAdminGrpcTransport(
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.export_entities), "__call__") as call:
+ client.export_entities(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = datastore_admin.ExportEntitiesRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_import_entities_empty_call_rest():
+ client = DatastoreAdminClient(
credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
)
- options = client_options.ClientOptions()
- options.api_key = "api_key"
- with pytest.raises(ValueError):
- client = DatastoreAdminClient(
- client_options=options,
- transport=transport,
- )
- # It is an error to provide an api_key and a credential.
- options = client_options.ClientOptions()
- options.api_key = "api_key"
- with pytest.raises(ValueError):
- client = DatastoreAdminClient(
- client_options=options, credentials=ga_credentials.AnonymousCredentials()
- )
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.import_entities), "__call__") as call:
+ client.import_entities(request=None)
- # It is an error to provide scopes and a transport instance.
- transport = transports.DatastoreAdminGrpcTransport(
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = datastore_admin.ImportEntitiesRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_create_index_empty_call_rest():
+ client = DatastoreAdminClient(
credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
)
- with pytest.raises(ValueError):
- client = DatastoreAdminClient(
- client_options={"scopes": ["1", "2"]},
- transport=transport,
- )
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.create_index), "__call__") as call:
+ client.create_index(request=None)
-def test_transport_instance():
- # A client may be instantiated with a custom transport instance.
- transport = transports.DatastoreAdminGrpcTransport(
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = datastore_admin.CreateIndexRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_delete_index_empty_call_rest():
+ client = DatastoreAdminClient(
credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
)
- client = DatastoreAdminClient(transport=transport)
- assert client.transport is transport
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.delete_index), "__call__") as call:
+ client.delete_index(request=None)
-def test_transport_get_channel():
- # A client may be instantiated with a custom transport instance.
- transport = transports.DatastoreAdminGrpcTransport(
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = datastore_admin.DeleteIndexRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_get_index_empty_call_rest():
+ client = DatastoreAdminClient(
credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
)
- channel = transport.grpc_channel
- assert channel
- transport = transports.DatastoreAdminGrpcAsyncIOTransport(
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.get_index), "__call__") as call:
+ client.get_index(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = datastore_admin.GetIndexRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_list_indexes_empty_call_rest():
+ client = DatastoreAdminClient(
credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
)
- channel = transport.grpc_channel
- assert channel
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.list_indexes), "__call__") as call:
+ client.list_indexes(request=None)
-@pytest.mark.parametrize(
- "transport_class",
- [
- transports.DatastoreAdminGrpcTransport,
- transports.DatastoreAdminGrpcAsyncIOTransport,
- transports.DatastoreAdminRestTransport,
- ],
-)
-def test_transport_adc(transport_class):
- # Test default credentials are used if not provided.
- with mock.patch.object(google.auth, "default") as adc:
- adc.return_value = (ga_credentials.AnonymousCredentials(), None)
- transport_class()
- adc.assert_called_once()
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = datastore_admin.ListIndexesRequest()
+
+ assert args[0] == request_msg
-@pytest.mark.parametrize(
- "transport_name",
- [
- "grpc",
- "rest",
- ],
-)
-def test_transport_kind(transport_name):
- transport = DatastoreAdminClient.get_transport_class(transport_name)(
+def test_datastore_admin_rest_lro_client():
+ client = DatastoreAdminClient(
credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+ transport = client.transport
+
+ # Ensure that we have an api-core operations client.
+ assert isinstance(
+ transport.operations_client,
+ operations_v1.AbstractOperationsClient,
)
- assert transport.kind == transport_name
+
+ # Ensure that subsequent calls to the property send the exact same object.
+ assert transport.operations_client is transport.operations_client
def test_transport_grpc_default():
@@ -5075,23 +5485,6 @@ def test_datastore_admin_http_transport_client_cert_source_for_mtls():
mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
-def test_datastore_admin_rest_lro_client():
- client = DatastoreAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
- transport = client.transport
-
- # Ensure that we have a api-core operations client.
- assert isinstance(
- transport.operations_client,
- operations_v1.AbstractOperationsClient,
- )
-
- # Ensure that subsequent calls to the property send the exact same object.
- assert transport.operations_client is transport.operations_client
-
-
@pytest.mark.parametrize(
"transport_name",
[
@@ -5332,377 +5725,133 @@ def test_datastore_admin_grpc_lro_async_client():
)
# Ensure that subsequent calls to the property send the exact same object.
- assert transport.operations_client is transport.operations_client
-
-
-def test_common_billing_account_path():
- billing_account = "squid"
- expected = "billingAccounts/{billing_account}".format(
- billing_account=billing_account,
- )
- actual = DatastoreAdminClient.common_billing_account_path(billing_account)
- assert expected == actual
-
-
-def test_parse_common_billing_account_path():
- expected = {
- "billing_account": "clam",
- }
- path = DatastoreAdminClient.common_billing_account_path(**expected)
-
- # Check that the path construction is reversible.
- actual = DatastoreAdminClient.parse_common_billing_account_path(path)
- assert expected == actual
-
-
-def test_common_folder_path():
- folder = "whelk"
- expected = "folders/{folder}".format(
- folder=folder,
- )
- actual = DatastoreAdminClient.common_folder_path(folder)
- assert expected == actual
-
-
-def test_parse_common_folder_path():
- expected = {
- "folder": "octopus",
- }
- path = DatastoreAdminClient.common_folder_path(**expected)
-
- # Check that the path construction is reversible.
- actual = DatastoreAdminClient.parse_common_folder_path(path)
- assert expected == actual
-
-
-def test_common_organization_path():
- organization = "oyster"
- expected = "organizations/{organization}".format(
- organization=organization,
- )
- actual = DatastoreAdminClient.common_organization_path(organization)
- assert expected == actual
-
-
-def test_parse_common_organization_path():
- expected = {
- "organization": "nudibranch",
- }
- path = DatastoreAdminClient.common_organization_path(**expected)
-
- # Check that the path construction is reversible.
- actual = DatastoreAdminClient.parse_common_organization_path(path)
- assert expected == actual
-
-
-def test_common_project_path():
- project = "cuttlefish"
- expected = "projects/{project}".format(
- project=project,
- )
- actual = DatastoreAdminClient.common_project_path(project)
- assert expected == actual
-
-
-def test_parse_common_project_path():
- expected = {
- "project": "mussel",
- }
- path = DatastoreAdminClient.common_project_path(**expected)
-
- # Check that the path construction is reversible.
- actual = DatastoreAdminClient.parse_common_project_path(path)
- assert expected == actual
-
-
-def test_common_location_path():
- project = "winkle"
- location = "nautilus"
- expected = "projects/{project}/locations/{location}".format(
- project=project,
- location=location,
- )
- actual = DatastoreAdminClient.common_location_path(project, location)
- assert expected == actual
-
-
-def test_parse_common_location_path():
- expected = {
- "project": "scallop",
- "location": "abalone",
- }
- path = DatastoreAdminClient.common_location_path(**expected)
-
- # Check that the path construction is reversible.
- actual = DatastoreAdminClient.parse_common_location_path(path)
- assert expected == actual
-
-
-def test_client_with_default_client_info():
- client_info = gapic_v1.client_info.ClientInfo()
-
- with mock.patch.object(
- transports.DatastoreAdminTransport, "_prep_wrapped_messages"
- ) as prep:
- client = DatastoreAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- client_info=client_info,
- )
- prep.assert_called_once_with(client_info)
-
- with mock.patch.object(
- transports.DatastoreAdminTransport, "_prep_wrapped_messages"
- ) as prep:
- transport_class = DatastoreAdminClient.get_transport_class()
- transport = transport_class(
- credentials=ga_credentials.AnonymousCredentials(),
- client_info=client_info,
- )
- prep.assert_called_once_with(client_info)
-
-
-@pytest.mark.asyncio
-async def test_transport_close_async():
- client = DatastoreAdminAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc_asyncio",
- )
- with mock.patch.object(
- type(getattr(client.transport, "grpc_channel")), "close"
- ) as close:
- async with client:
- close.assert_not_called()
- close.assert_called_once()
-
-
-def test_cancel_operation_rest_bad_request(
- transport: str = "rest", request_type=operations_pb2.CancelOperationRequest
-):
- client = DatastoreAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- request = request_type()
- request = json_format.ParseDict(
- {"name": "projects/sample1/operations/sample2"}, request
- )
-
- # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
- ):
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 400
- response_value.request = Request()
- req.return_value = response_value
- client.cancel_operation(request)
-
-
-@pytest.mark.parametrize(
- "request_type",
- [
- operations_pb2.CancelOperationRequest,
- dict,
- ],
-)
-def test_cancel_operation_rest(request_type):
- client = DatastoreAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
- request_init = {"name": "projects/sample1/operations/sample2"}
- request = request_type(**request_init)
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), "request") as req:
- # Designate an appropriate value for the returned response.
- return_value = None
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- json_return_value = "{}"
-
- response_value._content = json_return_value.encode("UTF-8")
- req.return_value = response_value
-
- response = client.cancel_operation(request)
-
- # Establish that the response is the type that we expect.
- assert response is None
-
-
-def test_delete_operation_rest_bad_request(
- transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest
-):
- client = DatastoreAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- request = request_type()
- request = json_format.ParseDict(
- {"name": "projects/sample1/operations/sample2"}, request
- )
-
- # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
- ):
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 400
- response_value.request = Request()
- req.return_value = response_value
- client.delete_operation(request)
+ assert transport.operations_client is transport.operations_client
-@pytest.mark.parametrize(
- "request_type",
- [
- operations_pb2.DeleteOperationRequest,
- dict,
- ],
-)
-def test_delete_operation_rest(request_type):
- client = DatastoreAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
+def test_common_billing_account_path():
+ billing_account = "squid"
+ expected = "billingAccounts/{billing_account}".format(
+ billing_account=billing_account,
)
- request_init = {"name": "projects/sample1/operations/sample2"}
- request = request_type(**request_init)
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), "request") as req:
- # Designate an appropriate value for the returned response.
- return_value = None
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- json_return_value = "{}"
+ actual = DatastoreAdminClient.common_billing_account_path(billing_account)
+ assert expected == actual
- response_value._content = json_return_value.encode("UTF-8")
- req.return_value = response_value
- response = client.delete_operation(request)
+def test_parse_common_billing_account_path():
+ expected = {
+ "billing_account": "clam",
+ }
+ path = DatastoreAdminClient.common_billing_account_path(**expected)
- # Establish that the response is the type that we expect.
- assert response is None
+ # Check that the path construction is reversible.
+ actual = DatastoreAdminClient.parse_common_billing_account_path(path)
+ assert expected == actual
-def test_get_operation_rest_bad_request(
- transport: str = "rest", request_type=operations_pb2.GetOperationRequest
-):
- client = DatastoreAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
+def test_common_folder_path():
+ folder = "whelk"
+ expected = "folders/{folder}".format(
+ folder=folder,
)
+ actual = DatastoreAdminClient.common_folder_path(folder)
+ assert expected == actual
- request = request_type()
- request = json_format.ParseDict(
- {"name": "projects/sample1/operations/sample2"}, request
- )
- # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
- ):
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 400
- response_value.request = Request()
- req.return_value = response_value
- client.get_operation(request)
+def test_parse_common_folder_path():
+ expected = {
+ "folder": "octopus",
+ }
+ path = DatastoreAdminClient.common_folder_path(**expected)
+ # Check that the path construction is reversible.
+ actual = DatastoreAdminClient.parse_common_folder_path(path)
+ assert expected == actual
-@pytest.mark.parametrize(
- "request_type",
- [
- operations_pb2.GetOperationRequest,
- dict,
- ],
-)
-def test_get_operation_rest(request_type):
- client = DatastoreAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
- request_init = {"name": "projects/sample1/operations/sample2"}
- request = request_type(**request_init)
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), "request") as req:
- # Designate an appropriate value for the returned response.
- return_value = operations_pb2.Operation()
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- json_return_value = json_format.MessageToJson(return_value)
+def test_common_organization_path():
+ organization = "oyster"
+ expected = "organizations/{organization}".format(
+ organization=organization,
+ )
+ actual = DatastoreAdminClient.common_organization_path(organization)
+ assert expected == actual
- response_value._content = json_return_value.encode("UTF-8")
- req.return_value = response_value
- response = client.get_operation(request)
+def test_parse_common_organization_path():
+ expected = {
+ "organization": "nudibranch",
+ }
+ path = DatastoreAdminClient.common_organization_path(**expected)
- # Establish that the response is the type that we expect.
- assert isinstance(response, operations_pb2.Operation)
+ # Check that the path construction is reversible.
+ actual = DatastoreAdminClient.parse_common_organization_path(path)
+ assert expected == actual
-def test_list_operations_rest_bad_request(
- transport: str = "rest", request_type=operations_pb2.ListOperationsRequest
-):
- client = DatastoreAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
+def test_common_project_path():
+ project = "cuttlefish"
+ expected = "projects/{project}".format(
+ project=project,
)
+ actual = DatastoreAdminClient.common_project_path(project)
+ assert expected == actual
- request = request_type()
- request = json_format.ParseDict({"name": "projects/sample1"}, request)
- # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
- ):
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 400
- response_value.request = Request()
- req.return_value = response_value
- client.list_operations(request)
+def test_parse_common_project_path():
+ expected = {
+ "project": "mussel",
+ }
+ path = DatastoreAdminClient.common_project_path(**expected)
+ # Check that the path construction is reversible.
+ actual = DatastoreAdminClient.parse_common_project_path(path)
+ assert expected == actual
-@pytest.mark.parametrize(
- "request_type",
- [
- operations_pb2.ListOperationsRequest,
- dict,
- ],
-)
-def test_list_operations_rest(request_type):
- client = DatastoreAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
+
+def test_common_location_path():
+ project = "winkle"
+ location = "nautilus"
+ expected = "projects/{project}/locations/{location}".format(
+ project=project,
+ location=location,
)
- request_init = {"name": "projects/sample1"}
- request = request_type(**request_init)
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), "request") as req:
- # Designate an appropriate value for the returned response.
- return_value = operations_pb2.ListOperationsResponse()
+ actual = DatastoreAdminClient.common_location_path(project, location)
+ assert expected == actual
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- json_return_value = json_format.MessageToJson(return_value)
- response_value._content = json_return_value.encode("UTF-8")
- req.return_value = response_value
+def test_parse_common_location_path():
+ expected = {
+ "project": "scallop",
+ "location": "abalone",
+ }
+ path = DatastoreAdminClient.common_location_path(**expected)
- response = client.list_operations(request)
+ # Check that the path construction is reversible.
+ actual = DatastoreAdminClient.parse_common_location_path(path)
+ assert expected == actual
- # Establish that the response is the type that we expect.
- assert isinstance(response, operations_pb2.ListOperationsResponse)
+
+def test_client_with_default_client_info():
+ client_info = gapic_v1.client_info.ClientInfo()
+
+ with mock.patch.object(
+ transports.DatastoreAdminTransport, "_prep_wrapped_messages"
+ ) as prep:
+ client = DatastoreAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ client_info=client_info,
+ )
+ prep.assert_called_once_with(client_info)
+
+ with mock.patch.object(
+ transports.DatastoreAdminTransport, "_prep_wrapped_messages"
+ ) as prep:
+ transport_class = DatastoreAdminClient.get_transport_class()
+ transport = transport_class(
+ credentials=ga_credentials.AnonymousCredentials(),
+ client_info=client_info,
+ )
+ prep.assert_called_once_with(client_info)
def test_delete_operation(transport: str = "grpc"):
@@ -5732,7 +5881,7 @@ def test_delete_operation(transport: str = "grpc"):
@pytest.mark.asyncio
async def test_delete_operation_async(transport: str = "grpc_asyncio"):
client = DatastoreAdminAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -5785,7 +5934,7 @@ def test_delete_operation_field_headers():
@pytest.mark.asyncio
async def test_delete_operation_field_headers_async():
client = DatastoreAdminAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
@@ -5830,7 +5979,7 @@ def test_delete_operation_from_dict():
@pytest.mark.asyncio
async def test_delete_operation_from_dict_async():
client = DatastoreAdminAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.delete_operation), "__call__") as call:
@@ -5871,7 +6020,7 @@ def test_cancel_operation(transport: str = "grpc"):
@pytest.mark.asyncio
async def test_cancel_operation_async(transport: str = "grpc_asyncio"):
client = DatastoreAdminAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -5924,7 +6073,7 @@ def test_cancel_operation_field_headers():
@pytest.mark.asyncio
async def test_cancel_operation_field_headers_async():
client = DatastoreAdminAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
@@ -5969,7 +6118,7 @@ def test_cancel_operation_from_dict():
@pytest.mark.asyncio
async def test_cancel_operation_from_dict_async():
client = DatastoreAdminAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call:
@@ -6010,7 +6159,7 @@ def test_get_operation(transport: str = "grpc"):
@pytest.mark.asyncio
async def test_get_operation_async(transport: str = "grpc_asyncio"):
client = DatastoreAdminAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -6065,7 +6214,7 @@ def test_get_operation_field_headers():
@pytest.mark.asyncio
async def test_get_operation_field_headers_async():
client = DatastoreAdminAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
@@ -6112,7 +6261,7 @@ def test_get_operation_from_dict():
@pytest.mark.asyncio
async def test_get_operation_from_dict_async():
client = DatastoreAdminAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
@@ -6155,7 +6304,7 @@ def test_list_operations(transport: str = "grpc"):
@pytest.mark.asyncio
async def test_list_operations_async(transport: str = "grpc_asyncio"):
client = DatastoreAdminAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -6210,7 +6359,7 @@ def test_list_operations_field_headers():
@pytest.mark.asyncio
async def test_list_operations_field_headers_async():
client = DatastoreAdminAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
@@ -6257,7 +6406,7 @@ def test_list_operations_from_dict():
@pytest.mark.asyncio
async def test_list_operations_from_dict_async():
client = DatastoreAdminAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
@@ -6273,22 +6422,41 @@ async def test_list_operations_from_dict_async():
call.assert_called()
-def test_transport_close():
- transports = {
- "rest": "_session",
- "grpc": "_grpc_channel",
- }
+def test_transport_close_grpc():
+ client = DatastoreAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="grpc"
+ )
+ with mock.patch.object(
+ type(getattr(client.transport, "_grpc_channel")), "close"
+ ) as close:
+ with client:
+ close.assert_not_called()
+ close.assert_called_once()
- for transport, close_name in transports.items():
- client = DatastoreAdminClient(
- credentials=ga_credentials.AnonymousCredentials(), transport=transport
- )
- with mock.patch.object(
- type(getattr(client.transport, close_name)), "close"
- ) as close:
- with client:
- close.assert_not_called()
- close.assert_called_once()
+
+@pytest.mark.asyncio
+async def test_transport_close_grpc_asyncio():
+ client = DatastoreAdminAsyncClient(
+ credentials=async_anonymous_credentials(), transport="grpc_asyncio"
+ )
+ with mock.patch.object(
+ type(getattr(client.transport, "_grpc_channel")), "close"
+ ) as close:
+ async with client:
+ close.assert_not_called()
+ close.assert_called_once()
+
+
+def test_transport_close_rest():
+ client = DatastoreAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+ )
+ with mock.patch.object(
+ type(getattr(client.transport, "_session")), "close"
+ ) as close:
+ with client:
+ close.assert_not_called()
+ close.assert_called_once()
def test_client_ctx():
diff --git a/tests/unit/gapic/datastore_v1/test_datastore.py b/tests/unit/gapic/datastore_v1/test_datastore.py
index aa1af525..8a28ba74 100644
--- a/tests/unit/gapic/datastore_v1/test_datastore.py
+++ b/tests/unit/gapic/datastore_v1/test_datastore.py
@@ -24,7 +24,7 @@
import grpc
from grpc.experimental import aio
-from collections.abc import Iterable
+from collections.abc import Iterable, AsyncIterable
from google.protobuf import json_format
import json
import math
@@ -37,6 +37,13 @@
from requests.sessions import Session
from google.protobuf import json_format
+try:
+ from google.auth.aio import credentials as ga_credentials_async
+
+ HAS_GOOGLE_AUTH_AIO = True
+except ImportError: # pragma: NO COVER
+ HAS_GOOGLE_AUTH_AIO = False
+
from google.api_core import client_options
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
@@ -63,10 +70,24 @@
import google.auth
+async def mock_async_gen(data, chunk_size=1):
+ for i in range(0, len(data)): # pragma: NO COVER
+ chunk = data[i : i + chunk_size]
+ yield chunk.encode("utf-8")
+
+
def client_cert_source_callback():
return b"cert bytes", b"key bytes"
+# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded.
+# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107.
+def async_anonymous_credentials():
+ if HAS_GOOGLE_AUTH_AIO:
+ return ga_credentials_async.AnonymousCredentials()
+ return ga_credentials.AnonymousCredentials()
+
+
# If default endpoint is localhost, then default mtls endpoint will be the same.
# This method modifies the default endpoint so the client can produce a different
# mtls endpoint for endpoint testing purposes.
@@ -1122,25 +1143,6 @@ def test_lookup(request_type, transport: str = "grpc"):
assert response.transaction == b"transaction_blob"
-def test_lookup_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc",
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.lookup), "__call__") as call:
- call.return_value.name = (
- "foo" # operation_request.operation in compute client(s) expect a string.
- )
- client.lookup()
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == datastore.LookupRequest()
-
-
def test_lookup_non_empty_request_with_auto_populated_field():
# This test is a coverage failsafe to make sure that UUID4 fields are
# automatically populated, according to AIP-4235, with non-empty requests.
@@ -1206,36 +1208,13 @@ def test_lookup_use_cached_wrapped_rpc():
assert mock_rpc.call_count == 2
-@pytest.mark.asyncio
-async def test_lookup_empty_call_async():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
- client = DatastoreAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc_asyncio",
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.lookup), "__call__") as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- datastore.LookupResponse(
- transaction=b"transaction_blob",
- )
- )
- response = await client.lookup()
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == datastore.LookupRequest()
-
-
@pytest.mark.asyncio
async def test_lookup_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
# Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
# instead of constructing them on each call
with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
client = DatastoreAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -1274,7 +1253,7 @@ async def test_lookup_async(
transport: str = "grpc_asyncio", request_type=datastore.LookupRequest
):
client = DatastoreAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -1308,47 +1287,6 @@ async def test_lookup_async_from_dict():
await test_lookup_async(request_type=dict)
-def test_lookup_routing_parameters():
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = datastore.LookupRequest(**{"project_id": "sample1"})
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.lookup), "__call__") as call:
- call.return_value = datastore.LookupResponse()
- client.lookup(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- _, _, kw = call.mock_calls[0]
- # This test doesn't assert anything useful.
- assert kw["metadata"]
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = datastore.LookupRequest(**{"database_id": "sample1"})
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.lookup), "__call__") as call:
- call.return_value = datastore.LookupResponse()
- client.lookup(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- _, _, kw = call.mock_calls[0]
- # This test doesn't assert anything useful.
- assert kw["metadata"]
-
-
def test_lookup_flattened():
client = DatastoreClient(
credentials=ga_credentials.AnonymousCredentials(),
@@ -1416,7 +1354,7 @@ def test_lookup_flattened_error():
@pytest.mark.asyncio
async def test_lookup_flattened_async():
client = DatastoreAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -1463,7 +1401,7 @@ async def test_lookup_flattened_async():
@pytest.mark.asyncio
async def test_lookup_flattened_error_async():
client = DatastoreAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Attempting to call a method with both a request object and flattened
@@ -1519,25 +1457,6 @@ def test_run_query(request_type, transport: str = "grpc"):
assert response.transaction == b"transaction_blob"
-def test_run_query_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc",
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.run_query), "__call__") as call:
- call.return_value.name = (
- "foo" # operation_request.operation in compute client(s) expect a string.
- )
- client.run_query()
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == datastore.RunQueryRequest()
-
-
def test_run_query_non_empty_request_with_auto_populated_field():
# This test is a coverage failsafe to make sure that UUID4 fields are
# automatically populated, according to AIP-4235, with non-empty requests.
@@ -1603,36 +1522,13 @@ def test_run_query_use_cached_wrapped_rpc():
assert mock_rpc.call_count == 2
-@pytest.mark.asyncio
-async def test_run_query_empty_call_async():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
- client = DatastoreAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc_asyncio",
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.run_query), "__call__") as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- datastore.RunQueryResponse(
- transaction=b"transaction_blob",
- )
- )
- response = await client.run_query()
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == datastore.RunQueryRequest()
-
-
@pytest.mark.asyncio
async def test_run_query_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
# Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
# instead of constructing them on each call
with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
client = DatastoreAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -1671,7 +1567,7 @@ async def test_run_query_async(
transport: str = "grpc_asyncio", request_type=datastore.RunQueryRequest
):
client = DatastoreAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -1705,47 +1601,6 @@ async def test_run_query_async_from_dict():
await test_run_query_async(request_type=dict)
-def test_run_query_routing_parameters():
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = datastore.RunQueryRequest(**{"project_id": "sample1"})
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.run_query), "__call__") as call:
- call.return_value = datastore.RunQueryResponse()
- client.run_query(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- _, _, kw = call.mock_calls[0]
- # This test doesn't assert anything useful.
- assert kw["metadata"]
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = datastore.RunQueryRequest(**{"database_id": "sample1"})
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.run_query), "__call__") as call:
- call.return_value = datastore.RunQueryResponse()
- client.run_query(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- _, _, kw = call.mock_calls[0]
- # This test doesn't assert anything useful.
- assert kw["metadata"]
-
-
@pytest.mark.parametrize(
"request_type",
[
@@ -1784,27 +1639,6 @@ def test_run_aggregation_query(request_type, transport: str = "grpc"):
assert response.transaction == b"transaction_blob"
-def test_run_aggregation_query_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc",
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.run_aggregation_query), "__call__"
- ) as call:
- call.return_value.name = (
- "foo" # operation_request.operation in compute client(s) expect a string.
- )
- client.run_aggregation_query()
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == datastore.RunAggregationQueryRequest()
-
-
def test_run_aggregation_query_non_empty_request_with_auto_populated_field():
# This test is a coverage failsafe to make sure that UUID4 fields are
# automatically populated, according to AIP-4235, with non-empty requests.
@@ -1877,31 +1711,6 @@ def test_run_aggregation_query_use_cached_wrapped_rpc():
assert mock_rpc.call_count == 2
-@pytest.mark.asyncio
-async def test_run_aggregation_query_empty_call_async():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
- client = DatastoreAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc_asyncio",
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.run_aggregation_query), "__call__"
- ) as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- datastore.RunAggregationQueryResponse(
- transaction=b"transaction_blob",
- )
- )
- response = await client.run_aggregation_query()
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == datastore.RunAggregationQueryRequest()
-
-
@pytest.mark.asyncio
async def test_run_aggregation_query_async_use_cached_wrapped_rpc(
transport: str = "grpc_asyncio",
@@ -1910,7 +1719,7 @@ async def test_run_aggregation_query_async_use_cached_wrapped_rpc(
# instead of constructing them on each call
with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
client = DatastoreAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -1949,7 +1758,7 @@ async def test_run_aggregation_query_async(
transport: str = "grpc_asyncio", request_type=datastore.RunAggregationQueryRequest
):
client = DatastoreAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -1985,51 +1794,6 @@ async def test_run_aggregation_query_async_from_dict():
await test_run_aggregation_query_async(request_type=dict)
-def test_run_aggregation_query_routing_parameters():
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = datastore.RunAggregationQueryRequest(**{"project_id": "sample1"})
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.run_aggregation_query), "__call__"
- ) as call:
- call.return_value = datastore.RunAggregationQueryResponse()
- client.run_aggregation_query(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- _, _, kw = call.mock_calls[0]
- # This test doesn't assert anything useful.
- assert kw["metadata"]
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = datastore.RunAggregationQueryRequest(**{"database_id": "sample1"})
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.run_aggregation_query), "__call__"
- ) as call:
- call.return_value = datastore.RunAggregationQueryResponse()
- client.run_aggregation_query(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- _, _, kw = call.mock_calls[0]
- # This test doesn't assert anything useful.
- assert kw["metadata"]
-
-
@pytest.mark.parametrize(
"request_type",
[
@@ -2068,27 +1832,6 @@ def test_begin_transaction(request_type, transport: str = "grpc"):
assert response.transaction == b"transaction_blob"
-def test_begin_transaction_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc",
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.begin_transaction), "__call__"
- ) as call:
- call.return_value.name = (
- "foo" # operation_request.operation in compute client(s) expect a string.
- )
- client.begin_transaction()
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == datastore.BeginTransactionRequest()
-
-
def test_begin_transaction_non_empty_request_with_auto_populated_field():
# This test is a coverage failsafe to make sure that UUID4 fields are
# automatically populated, according to AIP-4235, with non-empty requests.
@@ -2159,41 +1902,16 @@ def test_begin_transaction_use_cached_wrapped_rpc():
@pytest.mark.asyncio
-async def test_begin_transaction_empty_call_async():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
- client = DatastoreAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc_asyncio",
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.begin_transaction), "__call__"
- ) as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- datastore.BeginTransactionResponse(
- transaction=b"transaction_blob",
- )
- )
- response = await client.begin_transaction()
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == datastore.BeginTransactionRequest()
-
-
-@pytest.mark.asyncio
-async def test_begin_transaction_async_use_cached_wrapped_rpc(
- transport: str = "grpc_asyncio",
-):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = DatastoreAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
+async def test_begin_transaction_async_use_cached_wrapped_rpc(
+ transport: str = "grpc_asyncio",
+):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = DatastoreAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
# Should wrap all calls on client creation
assert wrapper_fn.call_count > 0
@@ -2230,7 +1948,7 @@ async def test_begin_transaction_async(
transport: str = "grpc_asyncio", request_type=datastore.BeginTransactionRequest
):
client = DatastoreAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -2266,51 +1984,6 @@ async def test_begin_transaction_async_from_dict():
await test_begin_transaction_async(request_type=dict)
-def test_begin_transaction_routing_parameters():
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = datastore.BeginTransactionRequest(**{"project_id": "sample1"})
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.begin_transaction), "__call__"
- ) as call:
- call.return_value = datastore.BeginTransactionResponse()
- client.begin_transaction(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- _, _, kw = call.mock_calls[0]
- # This test doesn't assert anything useful.
- assert kw["metadata"]
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = datastore.BeginTransactionRequest(**{"database_id": "sample1"})
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.begin_transaction), "__call__"
- ) as call:
- call.return_value = datastore.BeginTransactionResponse()
- client.begin_transaction(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- _, _, kw = call.mock_calls[0]
- # This test doesn't assert anything useful.
- assert kw["metadata"]
-
-
def test_begin_transaction_flattened():
client = DatastoreClient(
credentials=ga_credentials.AnonymousCredentials(),
@@ -2354,7 +2027,7 @@ def test_begin_transaction_flattened_error():
@pytest.mark.asyncio
async def test_begin_transaction_flattened_async():
client = DatastoreAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -2385,7 +2058,7 @@ async def test_begin_transaction_flattened_async():
@pytest.mark.asyncio
async def test_begin_transaction_flattened_error_async():
client = DatastoreAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Attempting to call a method with both a request object and flattened
@@ -2433,25 +2106,6 @@ def test_commit(request_type, transport: str = "grpc"):
assert response.index_updates == 1389
-def test_commit_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc",
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.commit), "__call__") as call:
- call.return_value.name = (
- "foo" # operation_request.operation in compute client(s) expect a string.
- )
- client.commit()
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == datastore.CommitRequest()
-
-
def test_commit_non_empty_request_with_auto_populated_field():
# This test is a coverage failsafe to make sure that UUID4 fields are
# automatically populated, according to AIP-4235, with non-empty requests.
@@ -2517,36 +2171,13 @@ def test_commit_use_cached_wrapped_rpc():
assert mock_rpc.call_count == 2
-@pytest.mark.asyncio
-async def test_commit_empty_call_async():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
- client = DatastoreAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc_asyncio",
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.commit), "__call__") as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- datastore.CommitResponse(
- index_updates=1389,
- )
- )
- response = await client.commit()
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == datastore.CommitRequest()
-
-
@pytest.mark.asyncio
async def test_commit_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
# Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
# instead of constructing them on each call
with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
client = DatastoreAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -2585,7 +2216,7 @@ async def test_commit_async(
transport: str = "grpc_asyncio", request_type=datastore.CommitRequest
):
client = DatastoreAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -2619,47 +2250,6 @@ async def test_commit_async_from_dict():
await test_commit_async(request_type=dict)
-def test_commit_routing_parameters():
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = datastore.CommitRequest(**{"project_id": "sample1"})
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.commit), "__call__") as call:
- call.return_value = datastore.CommitResponse()
- client.commit(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- _, _, kw = call.mock_calls[0]
- # This test doesn't assert anything useful.
- assert kw["metadata"]
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = datastore.CommitRequest(**{"database_id": "sample1"})
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.commit), "__call__") as call:
- call.return_value = datastore.CommitResponse()
- client.commit(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- _, _, kw = call.mock_calls[0]
- # This test doesn't assert anything useful.
- assert kw["metadata"]
-
-
def test_commit_flattened():
client = DatastoreClient(
credentials=ga_credentials.AnonymousCredentials(),
@@ -2742,7 +2332,7 @@ def test_commit_flattened_error():
@pytest.mark.asyncio
async def test_commit_flattened_async():
client = DatastoreAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -2799,7 +2389,7 @@ async def test_commit_flattened_async():
@pytest.mark.asyncio
async def test_commit_flattened_error_async():
client = DatastoreAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Attempting to call a method with both a request object and flattened
@@ -2857,25 +2447,6 @@ def test_rollback(request_type, transport: str = "grpc"):
assert isinstance(response, datastore.RollbackResponse)
-def test_rollback_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc",
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.rollback), "__call__") as call:
- call.return_value.name = (
- "foo" # operation_request.operation in compute client(s) expect a string.
- )
- client.rollback()
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == datastore.RollbackRequest()
-
-
def test_rollback_non_empty_request_with_auto_populated_field():
# This test is a coverage failsafe to make sure that UUID4 fields are
# automatically populated, according to AIP-4235, with non-empty requests.
@@ -2941,34 +2512,13 @@ def test_rollback_use_cached_wrapped_rpc():
assert mock_rpc.call_count == 2
-@pytest.mark.asyncio
-async def test_rollback_empty_call_async():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
- client = DatastoreAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc_asyncio",
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.rollback), "__call__") as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- datastore.RollbackResponse()
- )
- response = await client.rollback()
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == datastore.RollbackRequest()
-
-
@pytest.mark.asyncio
async def test_rollback_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
# Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
# instead of constructing them on each call
with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
client = DatastoreAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -3007,7 +2557,7 @@ async def test_rollback_async(
transport: str = "grpc_asyncio", request_type=datastore.RollbackRequest
):
client = DatastoreAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -3038,47 +2588,6 @@ async def test_rollback_async_from_dict():
await test_rollback_async(request_type=dict)
-def test_rollback_routing_parameters():
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = datastore.RollbackRequest(**{"project_id": "sample1"})
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.rollback), "__call__") as call:
- call.return_value = datastore.RollbackResponse()
- client.rollback(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- _, _, kw = call.mock_calls[0]
- # This test doesn't assert anything useful.
- assert kw["metadata"]
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = datastore.RollbackRequest(**{"database_id": "sample1"})
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.rollback), "__call__") as call:
- call.return_value = datastore.RollbackResponse()
- client.rollback(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- _, _, kw = call.mock_calls[0]
- # This test doesn't assert anything useful.
- assert kw["metadata"]
-
-
def test_rollback_flattened():
client = DatastoreClient(
credentials=ga_credentials.AnonymousCredentials(),
@@ -3125,7 +2634,7 @@ def test_rollback_flattened_error():
@pytest.mark.asyncio
async def test_rollback_flattened_async():
client = DatastoreAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -3158,7 +2667,7 @@ async def test_rollback_flattened_async():
@pytest.mark.asyncio
async def test_rollback_flattened_error_async():
client = DatastoreAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Attempting to call a method with both a request object and flattened
@@ -3204,25 +2713,6 @@ def test_allocate_ids(request_type, transport: str = "grpc"):
assert isinstance(response, datastore.AllocateIdsResponse)
-def test_allocate_ids_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc",
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.allocate_ids), "__call__") as call:
- call.return_value.name = (
- "foo" # operation_request.operation in compute client(s) expect a string.
- )
- client.allocate_ids()
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == datastore.AllocateIdsRequest()
-
-
def test_allocate_ids_non_empty_request_with_auto_populated_field():
# This test is a coverage failsafe to make sure that UUID4 fields are
# automatically populated, according to AIP-4235, with non-empty requests.
@@ -3288,27 +2778,6 @@ def test_allocate_ids_use_cached_wrapped_rpc():
assert mock_rpc.call_count == 2
-@pytest.mark.asyncio
-async def test_allocate_ids_empty_call_async():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
- client = DatastoreAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc_asyncio",
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.allocate_ids), "__call__") as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- datastore.AllocateIdsResponse()
- )
- response = await client.allocate_ids()
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == datastore.AllocateIdsRequest()
-
-
@pytest.mark.asyncio
async def test_allocate_ids_async_use_cached_wrapped_rpc(
transport: str = "grpc_asyncio",
@@ -3317,7 +2786,7 @@ async def test_allocate_ids_async_use_cached_wrapped_rpc(
# instead of constructing them on each call
with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
client = DatastoreAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -3356,7 +2825,7 @@ async def test_allocate_ids_async(
transport: str = "grpc_asyncio", request_type=datastore.AllocateIdsRequest
):
client = DatastoreAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -3387,69 +2856,28 @@ async def test_allocate_ids_async_from_dict():
await test_allocate_ids_async(request_type=dict)
-def test_allocate_ids_routing_parameters():
+def test_allocate_ids_flattened():
client = DatastoreClient(
credentials=ga_credentials.AnonymousCredentials(),
)
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = datastore.AllocateIdsRequest(**{"project_id": "sample1"})
-
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.allocate_ids), "__call__") as call:
+ # Designate an appropriate return value for the call.
call.return_value = datastore.AllocateIdsResponse()
- client.allocate_ids(request)
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.allocate_ids(
+ project_id="project_id_value",
+ keys=[
+ entity.Key(
+ partition_id=entity.PartitionId(project_id="project_id_value")
+ )
+ ],
+ )
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- _, _, kw = call.mock_calls[0]
- # This test doesn't assert anything useful.
- assert kw["metadata"]
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = datastore.AllocateIdsRequest(**{"database_id": "sample1"})
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.allocate_ids), "__call__") as call:
- call.return_value = datastore.AllocateIdsResponse()
- client.allocate_ids(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- _, _, kw = call.mock_calls[0]
- # This test doesn't assert anything useful.
- assert kw["metadata"]
-
-
-def test_allocate_ids_flattened():
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.allocate_ids), "__call__") as call:
- # Designate an appropriate return value for the call.
- call.return_value = datastore.AllocateIdsResponse()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.allocate_ids(
- project_id="project_id_value",
- keys=[
- entity.Key(
- partition_id=entity.PartitionId(project_id="project_id_value")
- )
- ],
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
+ # Establish that the underlying call was made with the expected
+ # request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
arg = args[0].project_id
@@ -3484,7 +2912,7 @@ def test_allocate_ids_flattened_error():
@pytest.mark.asyncio
async def test_allocate_ids_flattened_async():
client = DatastoreAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -3523,7 +2951,7 @@ async def test_allocate_ids_flattened_async():
@pytest.mark.asyncio
async def test_allocate_ids_flattened_error_async():
client = DatastoreAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Attempting to call a method with both a request object and flattened
@@ -3573,25 +3001,6 @@ def test_reserve_ids(request_type, transport: str = "grpc"):
assert isinstance(response, datastore.ReserveIdsResponse)
-def test_reserve_ids_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc",
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.reserve_ids), "__call__") as call:
- call.return_value.name = (
- "foo" # operation_request.operation in compute client(s) expect a string.
- )
- client.reserve_ids()
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == datastore.ReserveIdsRequest()
-
-
def test_reserve_ids_non_empty_request_with_auto_populated_field():
# This test is a coverage failsafe to make sure that UUID4 fields are
# automatically populated, according to AIP-4235, with non-empty requests.
@@ -3657,27 +3066,6 @@ def test_reserve_ids_use_cached_wrapped_rpc():
assert mock_rpc.call_count == 2
-@pytest.mark.asyncio
-async def test_reserve_ids_empty_call_async():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
- client = DatastoreAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc_asyncio",
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.reserve_ids), "__call__") as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- datastore.ReserveIdsResponse()
- )
- response = await client.reserve_ids()
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == datastore.ReserveIdsRequest()
-
-
@pytest.mark.asyncio
async def test_reserve_ids_async_use_cached_wrapped_rpc(
transport: str = "grpc_asyncio",
@@ -3686,7 +3074,7 @@ async def test_reserve_ids_async_use_cached_wrapped_rpc(
# instead of constructing them on each call
with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
client = DatastoreAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -3725,7 +3113,7 @@ async def test_reserve_ids_async(
transport: str = "grpc_asyncio", request_type=datastore.ReserveIdsRequest
):
client = DatastoreAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -3756,47 +3144,6 @@ async def test_reserve_ids_async_from_dict():
await test_reserve_ids_async(request_type=dict)
-def test_reserve_ids_routing_parameters():
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = datastore.ReserveIdsRequest(**{"project_id": "sample1"})
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.reserve_ids), "__call__") as call:
- call.return_value = datastore.ReserveIdsResponse()
- client.reserve_ids(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- _, _, kw = call.mock_calls[0]
- # This test doesn't assert anything useful.
- assert kw["metadata"]
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = datastore.ReserveIdsRequest(**{"database_id": "sample1"})
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.reserve_ids), "__call__") as call:
- call.return_value = datastore.ReserveIdsResponse()
- client.reserve_ids(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- _, _, kw = call.mock_calls[0]
- # This test doesn't assert anything useful.
- assert kw["metadata"]
-
-
def test_reserve_ids_flattened():
client = DatastoreClient(
credentials=ga_credentials.AnonymousCredentials(),
@@ -3853,7 +3200,7 @@ def test_reserve_ids_flattened_error():
@pytest.mark.asyncio
async def test_reserve_ids_flattened_async():
client = DatastoreAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -3892,7 +3239,7 @@ async def test_reserve_ids_flattened_async():
@pytest.mark.asyncio
async def test_reserve_ids_flattened_error_async():
client = DatastoreAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Attempting to call a method with both a request object and flattened
@@ -3909,46 +3256,6 @@ async def test_reserve_ids_flattened_error_async():
)
-@pytest.mark.parametrize(
- "request_type",
- [
- datastore.LookupRequest,
- dict,
- ],
-)
-def test_lookup_rest(request_type):
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # send a request that will satisfy transcoding
- request_init = {"project_id": "sample1"}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), "request") as req:
- # Designate an appropriate value for the returned response.
- return_value = datastore.LookupResponse(
- transaction=b"transaction_blob",
- )
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- # Convert return value to protobuf type
- return_value = datastore.LookupResponse.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
-
- response_value._content = json_return_value.encode("UTF-8")
- req.return_value = response_value
- response = client.lookup(request)
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, datastore.LookupResponse)
- assert response.transaction == b"transaction_blob"
-
-
def test_lookup_rest_use_cached_wrapped_rpc():
# Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
# instead of constructing them on each call
@@ -4075,83 +3382,6 @@ def test_lookup_rest_unset_required_fields():
)
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_lookup_rest_interceptors(null_interceptor):
- transport = transports.DatastoreRestTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- interceptor=None if null_interceptor else transports.DatastoreRestInterceptor(),
- )
- client = DatastoreClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.DatastoreRestInterceptor, "post_lookup"
- ) as post, mock.patch.object(
- transports.DatastoreRestInterceptor, "pre_lookup"
- ) as pre:
- pre.assert_not_called()
- post.assert_not_called()
- pb_message = datastore.LookupRequest.pb(datastore.LookupRequest())
- transcode.return_value = {
- "method": "post",
- "uri": "my_uri",
- "body": pb_message,
- "query_params": pb_message,
- }
-
- req.return_value = Response()
- req.return_value.status_code = 200
- req.return_value.request = PreparedRequest()
- req.return_value._content = datastore.LookupResponse.to_json(
- datastore.LookupResponse()
- )
-
- request = datastore.LookupRequest()
- metadata = [
- ("key", "val"),
- ("cephalopod", "squid"),
- ]
- pre.return_value = request, metadata
- post.return_value = datastore.LookupResponse()
-
- client.lookup(
- request,
- metadata=[
- ("key", "val"),
- ("cephalopod", "squid"),
- ],
- )
-
- pre.assert_called_once()
- post.assert_called_once()
-
-
-def test_lookup_rest_bad_request(
- transport: str = "rest", request_type=datastore.LookupRequest
-):
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # send a request that will satisfy transcoding
- request_init = {"project_id": "sample1"}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
- ):
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 400
- response_value.request = Request()
- req.return_value = response_value
- client.lookup(request)
-
-
def test_lookup_rest_flattened():
client = DatastoreClient(
credentials=ga_credentials.AnonymousCredentials(),
@@ -4223,74 +3453,28 @@ def test_lookup_rest_flattened_error(transport: str = "rest"):
)
-def test_lookup_rest_error():
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(), transport="rest"
- )
+def test_run_query_rest_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
-@pytest.mark.parametrize(
- "request_type",
- [
- datastore.RunQueryRequest,
- dict,
- ],
-)
-def test_run_query_rest(request_type):
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
+ # Ensure method has been cached
+ assert client._transport.run_query in client._transport._wrapped_methods
- # send a request that will satisfy transcoding
- request_init = {"project_id": "sample1"}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), "request") as req:
- # Designate an appropriate value for the returned response.
- return_value = datastore.RunQueryResponse(
- transaction=b"transaction_blob",
- )
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- # Convert return value to protobuf type
- return_value = datastore.RunQueryResponse.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
-
- response_value._content = json_return_value.encode("UTF-8")
- req.return_value = response_value
- response = client.run_query(request)
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, datastore.RunQueryResponse)
- assert response.transaction == b"transaction_blob"
-
-
-def test_run_query_rest_use_cached_wrapped_rpc():
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._transport.run_query in client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.Mock()
- mock_rpc.return_value.name = (
- "foo" # operation_request.operation in compute client(s) expect a string.
- )
- client._transport._wrapped_methods[client._transport.run_query] = mock_rpc
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client._transport._wrapped_methods[client._transport.run_query] = mock_rpc
request = {}
client.run_query(request)
@@ -4387,129 +3571,6 @@ def test_run_query_rest_unset_required_fields():
assert set(unset_fields) == (set(()) & set(("projectId",)))
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_run_query_rest_interceptors(null_interceptor):
- transport = transports.DatastoreRestTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- interceptor=None if null_interceptor else transports.DatastoreRestInterceptor(),
- )
- client = DatastoreClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.DatastoreRestInterceptor, "post_run_query"
- ) as post, mock.patch.object(
- transports.DatastoreRestInterceptor, "pre_run_query"
- ) as pre:
- pre.assert_not_called()
- post.assert_not_called()
- pb_message = datastore.RunQueryRequest.pb(datastore.RunQueryRequest())
- transcode.return_value = {
- "method": "post",
- "uri": "my_uri",
- "body": pb_message,
- "query_params": pb_message,
- }
-
- req.return_value = Response()
- req.return_value.status_code = 200
- req.return_value.request = PreparedRequest()
- req.return_value._content = datastore.RunQueryResponse.to_json(
- datastore.RunQueryResponse()
- )
-
- request = datastore.RunQueryRequest()
- metadata = [
- ("key", "val"),
- ("cephalopod", "squid"),
- ]
- pre.return_value = request, metadata
- post.return_value = datastore.RunQueryResponse()
-
- client.run_query(
- request,
- metadata=[
- ("key", "val"),
- ("cephalopod", "squid"),
- ],
- )
-
- pre.assert_called_once()
- post.assert_called_once()
-
-
-def test_run_query_rest_bad_request(
- transport: str = "rest", request_type=datastore.RunQueryRequest
-):
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # send a request that will satisfy transcoding
- request_init = {"project_id": "sample1"}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
- ):
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 400
- response_value.request = Request()
- req.return_value = response_value
- client.run_query(request)
-
-
-def test_run_query_rest_error():
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(), transport="rest"
- )
-
-
-@pytest.mark.parametrize(
- "request_type",
- [
- datastore.RunAggregationQueryRequest,
- dict,
- ],
-)
-def test_run_aggregation_query_rest(request_type):
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # send a request that will satisfy transcoding
- request_init = {"project_id": "sample1"}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), "request") as req:
- # Designate an appropriate value for the returned response.
- return_value = datastore.RunAggregationQueryResponse(
- transaction=b"transaction_blob",
- )
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- # Convert return value to protobuf type
- return_value = datastore.RunAggregationQueryResponse.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
-
- response_value._content = json_return_value.encode("UTF-8")
- req.return_value = response_value
- response = client.run_aggregation_query(request)
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, datastore.RunAggregationQueryResponse)
- assert response.transaction == b"transaction_blob"
-
-
def test_run_aggregation_query_rest_use_cached_wrapped_rpc():
# Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
# instead of constructing them on each call
@@ -4635,173 +3696,48 @@ def test_run_aggregation_query_rest_unset_required_fields():
assert set(unset_fields) == (set(()) & set(("projectId",)))
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_run_aggregation_query_rest_interceptors(null_interceptor):
- transport = transports.DatastoreRestTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- interceptor=None if null_interceptor else transports.DatastoreRestInterceptor(),
- )
- client = DatastoreClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.DatastoreRestInterceptor, "post_run_aggregation_query"
- ) as post, mock.patch.object(
- transports.DatastoreRestInterceptor, "pre_run_aggregation_query"
- ) as pre:
- pre.assert_not_called()
- post.assert_not_called()
- pb_message = datastore.RunAggregationQueryRequest.pb(
- datastore.RunAggregationQueryRequest()
+def test_begin_transaction_rest_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
)
- transcode.return_value = {
- "method": "post",
- "uri": "my_uri",
- "body": pb_message,
- "query_params": pb_message,
- }
- req.return_value = Response()
- req.return_value.status_code = 200
- req.return_value.request = PreparedRequest()
- req.return_value._content = datastore.RunAggregationQueryResponse.to_json(
- datastore.RunAggregationQueryResponse()
- )
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
- request = datastore.RunAggregationQueryRequest()
- metadata = [
- ("key", "val"),
- ("cephalopod", "squid"),
- ]
- pre.return_value = request, metadata
- post.return_value = datastore.RunAggregationQueryResponse()
+ # Ensure method has been cached
+ assert client._transport.begin_transaction in client._transport._wrapped_methods
- client.run_aggregation_query(
- request,
- metadata=[
- ("key", "val"),
- ("cephalopod", "squid"),
- ],
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
)
+ client._transport._wrapped_methods[
+ client._transport.begin_transaction
+ ] = mock_rpc
- pre.assert_called_once()
- post.assert_called_once()
+ request = {}
+ client.begin_transaction(request)
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
-def test_run_aggregation_query_rest_bad_request(
- transport: str = "rest", request_type=datastore.RunAggregationQueryRequest
-):
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
+ client.begin_transaction(request)
- # send a request that will satisfy transcoding
- request_init = {"project_id": "sample1"}
- request = request_type(**request_init)
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
- # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
- ):
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 400
- response_value.request = Request()
- req.return_value = response_value
- client.run_aggregation_query(request)
-
-def test_run_aggregation_query_rest_error():
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(), transport="rest"
- )
-
-
-@pytest.mark.parametrize(
- "request_type",
- [
- datastore.BeginTransactionRequest,
- dict,
- ],
-)
-def test_begin_transaction_rest(request_type):
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # send a request that will satisfy transcoding
- request_init = {"project_id": "sample1"}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), "request") as req:
- # Designate an appropriate value for the returned response.
- return_value = datastore.BeginTransactionResponse(
- transaction=b"transaction_blob",
- )
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- # Convert return value to protobuf type
- return_value = datastore.BeginTransactionResponse.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
-
- response_value._content = json_return_value.encode("UTF-8")
- req.return_value = response_value
- response = client.begin_transaction(request)
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, datastore.BeginTransactionResponse)
- assert response.transaction == b"transaction_blob"
-
-
-def test_begin_transaction_rest_use_cached_wrapped_rpc():
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._transport.begin_transaction in client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.Mock()
- mock_rpc.return_value.name = (
- "foo" # operation_request.operation in compute client(s) expect a string.
- )
- client._transport._wrapped_methods[
- client._transport.begin_transaction
- ] = mock_rpc
-
- request = {}
- client.begin_transaction(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- client.begin_transaction(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-
-def test_begin_transaction_rest_required_fields(
- request_type=datastore.BeginTransactionRequest,
-):
- transport_class = transports.DatastoreRestTransport
+def test_begin_transaction_rest_required_fields(
+ request_type=datastore.BeginTransactionRequest,
+):
+ transport_class = transports.DatastoreRestTransport
request_init = {}
request_init["project_id"] = ""
@@ -4882,85 +3818,6 @@ def test_begin_transaction_rest_unset_required_fields():
assert set(unset_fields) == (set(()) & set(("projectId",)))
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_begin_transaction_rest_interceptors(null_interceptor):
- transport = transports.DatastoreRestTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- interceptor=None if null_interceptor else transports.DatastoreRestInterceptor(),
- )
- client = DatastoreClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.DatastoreRestInterceptor, "post_begin_transaction"
- ) as post, mock.patch.object(
- transports.DatastoreRestInterceptor, "pre_begin_transaction"
- ) as pre:
- pre.assert_not_called()
- post.assert_not_called()
- pb_message = datastore.BeginTransactionRequest.pb(
- datastore.BeginTransactionRequest()
- )
- transcode.return_value = {
- "method": "post",
- "uri": "my_uri",
- "body": pb_message,
- "query_params": pb_message,
- }
-
- req.return_value = Response()
- req.return_value.status_code = 200
- req.return_value.request = PreparedRequest()
- req.return_value._content = datastore.BeginTransactionResponse.to_json(
- datastore.BeginTransactionResponse()
- )
-
- request = datastore.BeginTransactionRequest()
- metadata = [
- ("key", "val"),
- ("cephalopod", "squid"),
- ]
- pre.return_value = request, metadata
- post.return_value = datastore.BeginTransactionResponse()
-
- client.begin_transaction(
- request,
- metadata=[
- ("key", "val"),
- ("cephalopod", "squid"),
- ],
- )
-
- pre.assert_called_once()
- post.assert_called_once()
-
-
-def test_begin_transaction_rest_bad_request(
- transport: str = "rest", request_type=datastore.BeginTransactionRequest
-):
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # send a request that will satisfy transcoding
- request_init = {"project_id": "sample1"}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
- ):
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 400
- response_value.request = Request()
- req.return_value = response_value
- client.begin_transaction(request)
-
-
def test_begin_transaction_rest_flattened():
client = DatastoreClient(
credentials=ga_credentials.AnonymousCredentials(),
@@ -5017,52 +3874,6 @@ def test_begin_transaction_rest_flattened_error(transport: str = "rest"):
)
-def test_begin_transaction_rest_error():
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(), transport="rest"
- )
-
-
-@pytest.mark.parametrize(
- "request_type",
- [
- datastore.CommitRequest,
- dict,
- ],
-)
-def test_commit_rest(request_type):
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # send a request that will satisfy transcoding
- request_init = {"project_id": "sample1"}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), "request") as req:
- # Designate an appropriate value for the returned response.
- return_value = datastore.CommitResponse(
- index_updates=1389,
- )
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- # Convert return value to protobuf type
- return_value = datastore.CommitResponse.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
-
- response_value._content = json_return_value.encode("UTF-8")
- req.return_value = response_value
- response = client.commit(request)
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, datastore.CommitResponse)
- assert response.index_updates == 1389
-
-
def test_commit_rest_use_cached_wrapped_rpc():
# Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
# instead of constructing them on each call
@@ -5181,96 +3992,19 @@ def test_commit_rest_unset_required_fields():
assert set(unset_fields) == (set(()) & set(("projectId",)))
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_commit_rest_interceptors(null_interceptor):
- transport = transports.DatastoreRestTransport(
+def test_commit_rest_flattened():
+ client = DatastoreClient(
credentials=ga_credentials.AnonymousCredentials(),
- interceptor=None if null_interceptor else transports.DatastoreRestInterceptor(),
+ transport="rest",
)
- client = DatastoreClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.DatastoreRestInterceptor, "post_commit"
- ) as post, mock.patch.object(
- transports.DatastoreRestInterceptor, "pre_commit"
- ) as pre:
- pre.assert_not_called()
- post.assert_not_called()
- pb_message = datastore.CommitRequest.pb(datastore.CommitRequest())
- transcode.return_value = {
- "method": "post",
- "uri": "my_uri",
- "body": pb_message,
- "query_params": pb_message,
- }
-
- req.return_value = Response()
- req.return_value.status_code = 200
- req.return_value.request = PreparedRequest()
- req.return_value._content = datastore.CommitResponse.to_json(
- datastore.CommitResponse()
- )
- request = datastore.CommitRequest()
- metadata = [
- ("key", "val"),
- ("cephalopod", "squid"),
- ]
- pre.return_value = request, metadata
- post.return_value = datastore.CommitResponse()
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), "request") as req:
+ # Designate an appropriate value for the returned response.
+ return_value = datastore.CommitResponse()
- client.commit(
- request,
- metadata=[
- ("key", "val"),
- ("cephalopod", "squid"),
- ],
- )
-
- pre.assert_called_once()
- post.assert_called_once()
-
-
-def test_commit_rest_bad_request(
- transport: str = "rest", request_type=datastore.CommitRequest
-):
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # send a request that will satisfy transcoding
- request_init = {"project_id": "sample1"}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
- ):
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 400
- response_value.request = Request()
- req.return_value = response_value
- client.commit(request)
-
-
-def test_commit_rest_flattened():
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), "request") as req:
- # Designate an appropriate value for the returned response.
- return_value = datastore.CommitResponse()
-
- # get arguments that satisfy an http rule for this method
- sample_request = {"project_id": "sample1"}
+ # get arguments that satisfy an http rule for this method
+ sample_request = {"project_id": "sample1"}
# get truthy value for each flattened field
mock_args = dict(
@@ -5338,49 +4072,6 @@ def test_commit_rest_flattened_error(transport: str = "rest"):
)
-def test_commit_rest_error():
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(), transport="rest"
- )
-
-
-@pytest.mark.parametrize(
- "request_type",
- [
- datastore.RollbackRequest,
- dict,
- ],
-)
-def test_rollback_rest(request_type):
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # send a request that will satisfy transcoding
- request_init = {"project_id": "sample1"}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), "request") as req:
- # Designate an appropriate value for the returned response.
- return_value = datastore.RollbackResponse()
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- # Convert return value to protobuf type
- return_value = datastore.RollbackResponse.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
-
- response_value._content = json_return_value.encode("UTF-8")
- req.return_value = response_value
- response = client.rollback(request)
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, datastore.RollbackResponse)
-
-
def test_rollback_rest_use_cached_wrapped_rpc():
# Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
# instead of constructing them on each call
@@ -5511,83 +4202,6 @@ def test_rollback_rest_unset_required_fields():
)
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_rollback_rest_interceptors(null_interceptor):
- transport = transports.DatastoreRestTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- interceptor=None if null_interceptor else transports.DatastoreRestInterceptor(),
- )
- client = DatastoreClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.DatastoreRestInterceptor, "post_rollback"
- ) as post, mock.patch.object(
- transports.DatastoreRestInterceptor, "pre_rollback"
- ) as pre:
- pre.assert_not_called()
- post.assert_not_called()
- pb_message = datastore.RollbackRequest.pb(datastore.RollbackRequest())
- transcode.return_value = {
- "method": "post",
- "uri": "my_uri",
- "body": pb_message,
- "query_params": pb_message,
- }
-
- req.return_value = Response()
- req.return_value.status_code = 200
- req.return_value.request = PreparedRequest()
- req.return_value._content = datastore.RollbackResponse.to_json(
- datastore.RollbackResponse()
- )
-
- request = datastore.RollbackRequest()
- metadata = [
- ("key", "val"),
- ("cephalopod", "squid"),
- ]
- pre.return_value = request, metadata
- post.return_value = datastore.RollbackResponse()
-
- client.rollback(
- request,
- metadata=[
- ("key", "val"),
- ("cephalopod", "squid"),
- ],
- )
-
- pre.assert_called_once()
- post.assert_called_once()
-
-
-def test_rollback_rest_bad_request(
- transport: str = "rest", request_type=datastore.RollbackRequest
-):
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # send a request that will satisfy transcoding
- request_init = {"project_id": "sample1"}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
- ):
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 400
- response_value.request = Request()
- req.return_value = response_value
- client.rollback(request)
-
-
def test_rollback_rest_flattened():
client = DatastoreClient(
credentials=ga_credentials.AnonymousCredentials(),
@@ -5645,49 +4259,6 @@ def test_rollback_rest_flattened_error(transport: str = "rest"):
)
-def test_rollback_rest_error():
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(), transport="rest"
- )
-
-
-@pytest.mark.parametrize(
- "request_type",
- [
- datastore.AllocateIdsRequest,
- dict,
- ],
-)
-def test_allocate_ids_rest(request_type):
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # send a request that will satisfy transcoding
- request_init = {"project_id": "sample1"}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), "request") as req:
- # Designate an appropriate value for the returned response.
- return_value = datastore.AllocateIdsResponse()
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- # Convert return value to protobuf type
- return_value = datastore.AllocateIdsResponse.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
-
- response_value._content = json_return_value.encode("UTF-8")
- req.return_value = response_value
- response = client.allocate_ids(request)
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, datastore.AllocateIdsResponse)
-
-
def test_allocate_ids_rest_use_cached_wrapped_rpc():
# Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
# instead of constructing them on each call
@@ -5814,107 +4385,30 @@ def test_allocate_ids_rest_unset_required_fields():
)
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_allocate_ids_rest_interceptors(null_interceptor):
- transport = transports.DatastoreRestTransport(
+def test_allocate_ids_rest_flattened():
+ client = DatastoreClient(
credentials=ga_credentials.AnonymousCredentials(),
- interceptor=None if null_interceptor else transports.DatastoreRestInterceptor(),
+ transport="rest",
)
- client = DatastoreClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.DatastoreRestInterceptor, "post_allocate_ids"
- ) as post, mock.patch.object(
- transports.DatastoreRestInterceptor, "pre_allocate_ids"
- ) as pre:
- pre.assert_not_called()
- post.assert_not_called()
- pb_message = datastore.AllocateIdsRequest.pb(datastore.AllocateIdsRequest())
- transcode.return_value = {
- "method": "post",
- "uri": "my_uri",
- "body": pb_message,
- "query_params": pb_message,
- }
- req.return_value = Response()
- req.return_value.status_code = 200
- req.return_value.request = PreparedRequest()
- req.return_value._content = datastore.AllocateIdsResponse.to_json(
- datastore.AllocateIdsResponse()
- )
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), "request") as req:
+ # Designate an appropriate value for the returned response.
+ return_value = datastore.AllocateIdsResponse()
- request = datastore.AllocateIdsRequest()
- metadata = [
- ("key", "val"),
- ("cephalopod", "squid"),
- ]
- pre.return_value = request, metadata
- post.return_value = datastore.AllocateIdsResponse()
+ # get arguments that satisfy an http rule for this method
+ sample_request = {"project_id": "sample1"}
- client.allocate_ids(
- request,
- metadata=[
- ("key", "val"),
- ("cephalopod", "squid"),
+ # get truthy value for each flattened field
+ mock_args = dict(
+ project_id="project_id_value",
+ keys=[
+ entity.Key(
+ partition_id=entity.PartitionId(project_id="project_id_value")
+ )
],
)
-
- pre.assert_called_once()
- post.assert_called_once()
-
-
-def test_allocate_ids_rest_bad_request(
- transport: str = "rest", request_type=datastore.AllocateIdsRequest
-):
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # send a request that will satisfy transcoding
- request_init = {"project_id": "sample1"}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
- ):
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 400
- response_value.request = Request()
- req.return_value = response_value
- client.allocate_ids(request)
-
-
-def test_allocate_ids_rest_flattened():
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), "request") as req:
- # Designate an appropriate value for the returned response.
- return_value = datastore.AllocateIdsResponse()
-
- # get arguments that satisfy an http rule for this method
- sample_request = {"project_id": "sample1"}
-
- # get truthy value for each flattened field
- mock_args = dict(
- project_id="project_id_value",
- keys=[
- entity.Key(
- partition_id=entity.PartitionId(project_id="project_id_value")
- )
- ],
- )
- mock_args.update(sample_request)
+ mock_args.update(sample_request)
# Wrap the value into a proper Response obj
response_value = Response()
@@ -5956,49 +4450,6 @@ def test_allocate_ids_rest_flattened_error(transport: str = "rest"):
)
-def test_allocate_ids_rest_error():
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(), transport="rest"
- )
-
-
-@pytest.mark.parametrize(
- "request_type",
- [
- datastore.ReserveIdsRequest,
- dict,
- ],
-)
-def test_reserve_ids_rest(request_type):
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # send a request that will satisfy transcoding
- request_init = {"project_id": "sample1"}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), "request") as req:
- # Designate an appropriate value for the returned response.
- return_value = datastore.ReserveIdsResponse()
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- # Convert return value to protobuf type
- return_value = datastore.ReserveIdsResponse.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
-
- response_value._content = json_return_value.encode("UTF-8")
- req.return_value = response_value
- response = client.reserve_ids(request)
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, datastore.ReserveIdsResponse)
-
-
def test_reserve_ids_rest_use_cached_wrapped_rpc():
# Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
# instead of constructing them on each call
@@ -6125,83 +4576,6 @@ def test_reserve_ids_rest_unset_required_fields():
)
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_reserve_ids_rest_interceptors(null_interceptor):
- transport = transports.DatastoreRestTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- interceptor=None if null_interceptor else transports.DatastoreRestInterceptor(),
- )
- client = DatastoreClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.DatastoreRestInterceptor, "post_reserve_ids"
- ) as post, mock.patch.object(
- transports.DatastoreRestInterceptor, "pre_reserve_ids"
- ) as pre:
- pre.assert_not_called()
- post.assert_not_called()
- pb_message = datastore.ReserveIdsRequest.pb(datastore.ReserveIdsRequest())
- transcode.return_value = {
- "method": "post",
- "uri": "my_uri",
- "body": pb_message,
- "query_params": pb_message,
- }
-
- req.return_value = Response()
- req.return_value.status_code = 200
- req.return_value.request = PreparedRequest()
- req.return_value._content = datastore.ReserveIdsResponse.to_json(
- datastore.ReserveIdsResponse()
- )
-
- request = datastore.ReserveIdsRequest()
- metadata = [
- ("key", "val"),
- ("cephalopod", "squid"),
- ]
- pre.return_value = request, metadata
- post.return_value = datastore.ReserveIdsResponse()
-
- client.reserve_ids(
- request,
- metadata=[
- ("key", "val"),
- ("cephalopod", "squid"),
- ],
- )
-
- pre.assert_called_once()
- post.assert_called_once()
-
-
-def test_reserve_ids_rest_bad_request(
- transport: str = "rest", request_type=datastore.ReserveIdsRequest
-):
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # send a request that will satisfy transcoding
- request_init = {"project_id": "sample1"}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
- ):
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 400
- response_value.request = Request()
- req.return_value = response_value
- client.reserve_ids(request)
-
-
def test_reserve_ids_rest_flattened():
client = DatastoreClient(
credentials=ga_credentials.AnonymousCredentials(),
@@ -6267,12 +4641,6 @@ def test_reserve_ids_rest_flattened_error(transport: str = "rest"):
)
-def test_reserve_ids_rest_error():
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(), transport="rest"
- )
-
-
def test_credentials_transport_error():
# It is an error to provide credentials and a transport instance.
transport = transports.DatastoreGrpcTransport(
@@ -6342,41 +4710,3012 @@ def test_transport_get_channel():
channel = transport.grpc_channel
assert channel
- transport = transports.DatastoreGrpcAsyncIOTransport(
+ transport = transports.DatastoreGrpcAsyncIOTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ channel = transport.grpc_channel
+ assert channel
+
+
+@pytest.mark.parametrize(
+ "transport_class",
+ [
+ transports.DatastoreGrpcTransport,
+ transports.DatastoreGrpcAsyncIOTransport,
+ transports.DatastoreRestTransport,
+ ],
+)
+def test_transport_adc(transport_class):
+ # Test default credentials are used if not provided.
+ with mock.patch.object(google.auth, "default") as adc:
+ adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+ transport_class()
+ adc.assert_called_once()
+
+
+def test_transport_kind_grpc():
+ transport = DatastoreClient.get_transport_class("grpc")(
+ credentials=ga_credentials.AnonymousCredentials()
+ )
+ assert transport.kind == "grpc"
+
+
+def test_initialize_client_w_grpc():
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="grpc"
+ )
+ assert client is not None
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_lookup_empty_call_grpc():
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.lookup), "__call__") as call:
+ call.return_value = datastore.LookupResponse()
+ client.lookup(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = datastore.LookupRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_run_query_empty_call_grpc():
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.run_query), "__call__") as call:
+ call.return_value = datastore.RunQueryResponse()
+ client.run_query(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = datastore.RunQueryRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_run_aggregation_query_empty_call_grpc():
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(
+ type(client.transport.run_aggregation_query), "__call__"
+ ) as call:
+ call.return_value = datastore.RunAggregationQueryResponse()
+ client.run_aggregation_query(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = datastore.RunAggregationQueryRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_begin_transaction_empty_call_grpc():
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(
+ type(client.transport.begin_transaction), "__call__"
+ ) as call:
+ call.return_value = datastore.BeginTransactionResponse()
+ client.begin_transaction(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = datastore.BeginTransactionRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_commit_empty_call_grpc():
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.commit), "__call__") as call:
+ call.return_value = datastore.CommitResponse()
+ client.commit(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = datastore.CommitRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_rollback_empty_call_grpc():
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.rollback), "__call__") as call:
+ call.return_value = datastore.RollbackResponse()
+ client.rollback(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = datastore.RollbackRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_allocate_ids_empty_call_grpc():
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.allocate_ids), "__call__") as call:
+ call.return_value = datastore.AllocateIdsResponse()
+ client.allocate_ids(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = datastore.AllocateIdsRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_reserve_ids_empty_call_grpc():
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.reserve_ids), "__call__") as call:
+ call.return_value = datastore.ReserveIdsResponse()
+ client.reserve_ids(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = datastore.ReserveIdsRequest()
+
+ assert args[0] == request_msg
+
+
+def test_lookup_routing_parameters_request_1_grpc():
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.lookup), "__call__") as call:
+ call.return_value = datastore.LookupResponse()
+ client.lookup(request={"project_id": "sample1"})
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, kw = call.mock_calls[0]
+ request_msg = datastore.LookupRequest(**{"project_id": "sample1"})
+
+ assert args[0] == request_msg
+
+ expected_headers = {"project_id": "sample1"}
+ assert (
+ gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"]
+ )
+
+
+def test_lookup_routing_parameters_request_2_grpc():
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.lookup), "__call__") as call:
+ call.return_value = datastore.LookupResponse()
+ client.lookup(request={"database_id": "sample1"})
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, kw = call.mock_calls[0]
+ request_msg = datastore.LookupRequest(**{"database_id": "sample1"})
+
+ assert args[0] == request_msg
+
+ expected_headers = {"database_id": "sample1"}
+ assert (
+ gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"]
+ )
+
+
+def test_run_query_routing_parameters_request_1_grpc():
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.run_query), "__call__") as call:
+ call.return_value = datastore.RunQueryResponse()
+ client.run_query(request={"project_id": "sample1"})
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, kw = call.mock_calls[0]
+ request_msg = datastore.RunQueryRequest(**{"project_id": "sample1"})
+
+ assert args[0] == request_msg
+
+ expected_headers = {"project_id": "sample1"}
+ assert (
+ gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"]
+ )
+
+
+def test_run_query_routing_parameters_request_2_grpc():
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.run_query), "__call__") as call:
+ call.return_value = datastore.RunQueryResponse()
+ client.run_query(request={"database_id": "sample1"})
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, kw = call.mock_calls[0]
+ request_msg = datastore.RunQueryRequest(**{"database_id": "sample1"})
+
+ assert args[0] == request_msg
+
+ expected_headers = {"database_id": "sample1"}
+ assert (
+ gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"]
+ )
+
+
+def test_run_aggregation_query_routing_parameters_request_1_grpc():
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(
+ type(client.transport.run_aggregation_query), "__call__"
+ ) as call:
+ call.return_value = datastore.RunAggregationQueryResponse()
+ client.run_aggregation_query(request={"project_id": "sample1"})
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, kw = call.mock_calls[0]
+ request_msg = datastore.RunAggregationQueryRequest(**{"project_id": "sample1"})
+
+ assert args[0] == request_msg
+
+ expected_headers = {"project_id": "sample1"}
+ assert (
+ gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"]
+ )
+
+
+def test_run_aggregation_query_routing_parameters_request_2_grpc():
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(
+ type(client.transport.run_aggregation_query), "__call__"
+ ) as call:
+ call.return_value = datastore.RunAggregationQueryResponse()
+ client.run_aggregation_query(request={"database_id": "sample1"})
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, kw = call.mock_calls[0]
+ request_msg = datastore.RunAggregationQueryRequest(**{"database_id": "sample1"})
+
+ assert args[0] == request_msg
+
+ expected_headers = {"database_id": "sample1"}
+ assert (
+ gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"]
+ )
+
+
+def test_begin_transaction_routing_parameters_request_1_grpc():
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(
+ type(client.transport.begin_transaction), "__call__"
+ ) as call:
+ call.return_value = datastore.BeginTransactionResponse()
+ client.begin_transaction(request={"project_id": "sample1"})
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, kw = call.mock_calls[0]
+ request_msg = datastore.BeginTransactionRequest(**{"project_id": "sample1"})
+
+ assert args[0] == request_msg
+
+ expected_headers = {"project_id": "sample1"}
+ assert (
+ gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"]
+ )
+
+
+def test_begin_transaction_routing_parameters_request_2_grpc():
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(
+ type(client.transport.begin_transaction), "__call__"
+ ) as call:
+ call.return_value = datastore.BeginTransactionResponse()
+ client.begin_transaction(request={"database_id": "sample1"})
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, kw = call.mock_calls[0]
+ request_msg = datastore.BeginTransactionRequest(**{"database_id": "sample1"})
+
+ assert args[0] == request_msg
+
+ expected_headers = {"database_id": "sample1"}
+ assert (
+ gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"]
+ )
+
+
+def test_commit_routing_parameters_request_1_grpc():
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.commit), "__call__") as call:
+ call.return_value = datastore.CommitResponse()
+ client.commit(request={"project_id": "sample1"})
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, kw = call.mock_calls[0]
+ request_msg = datastore.CommitRequest(**{"project_id": "sample1"})
+
+ assert args[0] == request_msg
+
+ expected_headers = {"project_id": "sample1"}
+ assert (
+ gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"]
+ )
+
+
+def test_commit_routing_parameters_request_2_grpc():
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.commit), "__call__") as call:
+ call.return_value = datastore.CommitResponse()
+ client.commit(request={"database_id": "sample1"})
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, kw = call.mock_calls[0]
+ request_msg = datastore.CommitRequest(**{"database_id": "sample1"})
+
+ assert args[0] == request_msg
+
+ expected_headers = {"database_id": "sample1"}
+ assert (
+ gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"]
+ )
+
+
+def test_rollback_routing_parameters_request_1_grpc():
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.rollback), "__call__") as call:
+ call.return_value = datastore.RollbackResponse()
+ client.rollback(request={"project_id": "sample1"})
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, kw = call.mock_calls[0]
+ request_msg = datastore.RollbackRequest(**{"project_id": "sample1"})
+
+ assert args[0] == request_msg
+
+ expected_headers = {"project_id": "sample1"}
+ assert (
+ gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"]
+ )
+
+
+def test_rollback_routing_parameters_request_2_grpc():
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.rollback), "__call__") as call:
+ call.return_value = datastore.RollbackResponse()
+ client.rollback(request={"database_id": "sample1"})
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, kw = call.mock_calls[0]
+ request_msg = datastore.RollbackRequest(**{"database_id": "sample1"})
+
+ assert args[0] == request_msg
+
+ expected_headers = {"database_id": "sample1"}
+ assert (
+ gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"]
+ )
+
+
+def test_allocate_ids_routing_parameters_request_1_grpc():
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.allocate_ids), "__call__") as call:
+ call.return_value = datastore.AllocateIdsResponse()
+ client.allocate_ids(request={"project_id": "sample1"})
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, kw = call.mock_calls[0]
+ request_msg = datastore.AllocateIdsRequest(**{"project_id": "sample1"})
+
+ assert args[0] == request_msg
+
+ expected_headers = {"project_id": "sample1"}
+ assert (
+ gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"]
+ )
+
+
+def test_allocate_ids_routing_parameters_request_2_grpc():
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.allocate_ids), "__call__") as call:
+ call.return_value = datastore.AllocateIdsResponse()
+ client.allocate_ids(request={"database_id": "sample1"})
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, kw = call.mock_calls[0]
+ request_msg = datastore.AllocateIdsRequest(**{"database_id": "sample1"})
+
+ assert args[0] == request_msg
+
+ expected_headers = {"database_id": "sample1"}
+ assert (
+ gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"]
+ )
+
+
+def test_reserve_ids_routing_parameters_request_1_grpc():
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.reserve_ids), "__call__") as call:
+ call.return_value = datastore.ReserveIdsResponse()
+ client.reserve_ids(request={"project_id": "sample1"})
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, kw = call.mock_calls[0]
+ request_msg = datastore.ReserveIdsRequest(**{"project_id": "sample1"})
+
+ assert args[0] == request_msg
+
+ expected_headers = {"project_id": "sample1"}
+ assert (
+ gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"]
+ )
+
+
+def test_reserve_ids_routing_parameters_request_2_grpc():
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.reserve_ids), "__call__") as call:
+ call.return_value = datastore.ReserveIdsResponse()
+ client.reserve_ids(request={"database_id": "sample1"})
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, kw = call.mock_calls[0]
+ request_msg = datastore.ReserveIdsRequest(**{"database_id": "sample1"})
+
+ assert args[0] == request_msg
+
+ expected_headers = {"database_id": "sample1"}
+ assert (
+ gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"]
+ )
+
+
+def test_transport_kind_grpc_asyncio():
+ transport = DatastoreAsyncClient.get_transport_class("grpc_asyncio")(
+ credentials=async_anonymous_credentials()
+ )
+ assert transport.kind == "grpc_asyncio"
+
+
+def test_initialize_client_w_grpc_asyncio():
+ client = DatastoreAsyncClient(
+ credentials=async_anonymous_credentials(), transport="grpc_asyncio"
+ )
+ assert client is not None
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_lookup_empty_call_grpc_asyncio():
+ client = DatastoreAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.lookup), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ datastore.LookupResponse(
+ transaction=b"transaction_blob",
+ )
+ )
+ await client.lookup(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = datastore.LookupRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_run_query_empty_call_grpc_asyncio():
+ client = DatastoreAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.run_query), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ datastore.RunQueryResponse(
+ transaction=b"transaction_blob",
+ )
+ )
+ await client.run_query(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = datastore.RunQueryRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_run_aggregation_query_empty_call_grpc_asyncio():
+ client = DatastoreAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(
+ type(client.transport.run_aggregation_query), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ datastore.RunAggregationQueryResponse(
+ transaction=b"transaction_blob",
+ )
+ )
+ await client.run_aggregation_query(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = datastore.RunAggregationQueryRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_begin_transaction_empty_call_grpc_asyncio():
+ client = DatastoreAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(
+ type(client.transport.begin_transaction), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ datastore.BeginTransactionResponse(
+ transaction=b"transaction_blob",
+ )
+ )
+ await client.begin_transaction(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = datastore.BeginTransactionRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_commit_empty_call_grpc_asyncio():
+ client = DatastoreAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.commit), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ datastore.CommitResponse(
+ index_updates=1389,
+ )
+ )
+ await client.commit(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = datastore.CommitRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_rollback_empty_call_grpc_asyncio():
+ client = DatastoreAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.rollback), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ datastore.RollbackResponse()
+ )
+ await client.rollback(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = datastore.RollbackRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_allocate_ids_empty_call_grpc_asyncio():
+ client = DatastoreAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.allocate_ids), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ datastore.AllocateIdsResponse()
+ )
+ await client.allocate_ids(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = datastore.AllocateIdsRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_reserve_ids_empty_call_grpc_asyncio():
+ client = DatastoreAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.reserve_ids), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ datastore.ReserveIdsResponse()
+ )
+ await client.reserve_ids(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = datastore.ReserveIdsRequest()
+
+ assert args[0] == request_msg
+
+
+@pytest.mark.asyncio
+async def test_lookup_routing_parameters_request_1_grpc_asyncio():
+ client = DatastoreAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.lookup), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ datastore.LookupResponse(
+ transaction=b"transaction_blob",
+ )
+ )
+ await client.lookup(request={"project_id": "sample1"})
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, kw = call.mock_calls[0]
+ request_msg = datastore.LookupRequest(**{"project_id": "sample1"})
+
+ assert args[0] == request_msg
+
+ expected_headers = {"project_id": "sample1"}
+ assert (
+ gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"]
+ )
+
+
+@pytest.mark.asyncio
+async def test_lookup_routing_parameters_request_2_grpc_asyncio():
+ client = DatastoreAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.lookup), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ datastore.LookupResponse(
+ transaction=b"transaction_blob",
+ )
+ )
+ await client.lookup(request={"database_id": "sample1"})
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, kw = call.mock_calls[0]
+ request_msg = datastore.LookupRequest(**{"database_id": "sample1"})
+
+ assert args[0] == request_msg
+
+ expected_headers = {"database_id": "sample1"}
+ assert (
+ gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"]
+ )
+
+
+@pytest.mark.asyncio
+async def test_run_query_routing_parameters_request_1_grpc_asyncio():
+ client = DatastoreAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.run_query), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ datastore.RunQueryResponse(
+ transaction=b"transaction_blob",
+ )
+ )
+ await client.run_query(request={"project_id": "sample1"})
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, kw = call.mock_calls[0]
+ request_msg = datastore.RunQueryRequest(**{"project_id": "sample1"})
+
+ assert args[0] == request_msg
+
+ expected_headers = {"project_id": "sample1"}
+ assert (
+ gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"]
+ )
+
+
+@pytest.mark.asyncio
+async def test_run_query_routing_parameters_request_2_grpc_asyncio():
+ client = DatastoreAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.run_query), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ datastore.RunQueryResponse(
+ transaction=b"transaction_blob",
+ )
+ )
+ await client.run_query(request={"database_id": "sample1"})
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, kw = call.mock_calls[0]
+ request_msg = datastore.RunQueryRequest(**{"database_id": "sample1"})
+
+ assert args[0] == request_msg
+
+ expected_headers = {"database_id": "sample1"}
+ assert (
+ gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"]
+ )
+
+
+@pytest.mark.asyncio
+async def test_run_aggregation_query_routing_parameters_request_1_grpc_asyncio():
+ client = DatastoreAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(
+ type(client.transport.run_aggregation_query), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ datastore.RunAggregationQueryResponse(
+ transaction=b"transaction_blob",
+ )
+ )
+ await client.run_aggregation_query(request={"project_id": "sample1"})
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, kw = call.mock_calls[0]
+ request_msg = datastore.RunAggregationQueryRequest(**{"project_id": "sample1"})
+
+ assert args[0] == request_msg
+
+ expected_headers = {"project_id": "sample1"}
+ assert (
+ gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"]
+ )
+
+
+@pytest.mark.asyncio
+async def test_run_aggregation_query_routing_parameters_request_2_grpc_asyncio():
+ client = DatastoreAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(
+ type(client.transport.run_aggregation_query), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ datastore.RunAggregationQueryResponse(
+ transaction=b"transaction_blob",
+ )
+ )
+ await client.run_aggregation_query(request={"database_id": "sample1"})
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, kw = call.mock_calls[0]
+ request_msg = datastore.RunAggregationQueryRequest(**{"database_id": "sample1"})
+
+ assert args[0] == request_msg
+
+ expected_headers = {"database_id": "sample1"}
+ assert (
+ gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"]
+ )
+
+
+@pytest.mark.asyncio
+async def test_begin_transaction_routing_parameters_request_1_grpc_asyncio():
+ client = DatastoreAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(
+ type(client.transport.begin_transaction), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ datastore.BeginTransactionResponse(
+ transaction=b"transaction_blob",
+ )
+ )
+ await client.begin_transaction(request={"project_id": "sample1"})
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, kw = call.mock_calls[0]
+ request_msg = datastore.BeginTransactionRequest(**{"project_id": "sample1"})
+
+ assert args[0] == request_msg
+
+ expected_headers = {"project_id": "sample1"}
+ assert (
+ gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"]
+ )
+
+
+@pytest.mark.asyncio
+async def test_begin_transaction_routing_parameters_request_2_grpc_asyncio():
+ client = DatastoreAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(
+ type(client.transport.begin_transaction), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ datastore.BeginTransactionResponse(
+ transaction=b"transaction_blob",
+ )
+ )
+ await client.begin_transaction(request={"database_id": "sample1"})
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, kw = call.mock_calls[0]
+ request_msg = datastore.BeginTransactionRequest(**{"database_id": "sample1"})
+
+ assert args[0] == request_msg
+
+ expected_headers = {"database_id": "sample1"}
+ assert (
+ gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"]
+ )
+
+
+@pytest.mark.asyncio
+async def test_commit_routing_parameters_request_1_grpc_asyncio():
+ client = DatastoreAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.commit), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ datastore.CommitResponse(
+ index_updates=1389,
+ )
+ )
+ await client.commit(request={"project_id": "sample1"})
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, kw = call.mock_calls[0]
+ request_msg = datastore.CommitRequest(**{"project_id": "sample1"})
+
+ assert args[0] == request_msg
+
+ expected_headers = {"project_id": "sample1"}
+ assert (
+ gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"]
+ )
+
+
+@pytest.mark.asyncio
+async def test_commit_routing_parameters_request_2_grpc_asyncio():
+ client = DatastoreAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.commit), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ datastore.CommitResponse(
+ index_updates=1389,
+ )
+ )
+ await client.commit(request={"database_id": "sample1"})
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, kw = call.mock_calls[0]
+ request_msg = datastore.CommitRequest(**{"database_id": "sample1"})
+
+ assert args[0] == request_msg
+
+ expected_headers = {"database_id": "sample1"}
+ assert (
+ gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"]
+ )
+
+
+@pytest.mark.asyncio
+async def test_rollback_routing_parameters_request_1_grpc_asyncio():
+ client = DatastoreAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.rollback), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ datastore.RollbackResponse()
+ )
+ await client.rollback(request={"project_id": "sample1"})
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, kw = call.mock_calls[0]
+ request_msg = datastore.RollbackRequest(**{"project_id": "sample1"})
+
+ assert args[0] == request_msg
+
+ expected_headers = {"project_id": "sample1"}
+ assert (
+ gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"]
+ )
+
+
+@pytest.mark.asyncio
+async def test_rollback_routing_parameters_request_2_grpc_asyncio():
+ client = DatastoreAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.rollback), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ datastore.RollbackResponse()
+ )
+ await client.rollback(request={"database_id": "sample1"})
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, kw = call.mock_calls[0]
+ request_msg = datastore.RollbackRequest(**{"database_id": "sample1"})
+
+ assert args[0] == request_msg
+
+ expected_headers = {"database_id": "sample1"}
+ assert (
+ gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"]
+ )
+
+
+@pytest.mark.asyncio
+async def test_allocate_ids_routing_parameters_request_1_grpc_asyncio():
+ client = DatastoreAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.allocate_ids), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ datastore.AllocateIdsResponse()
+ )
+ await client.allocate_ids(request={"project_id": "sample1"})
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, kw = call.mock_calls[0]
+ request_msg = datastore.AllocateIdsRequest(**{"project_id": "sample1"})
+
+ assert args[0] == request_msg
+
+ expected_headers = {"project_id": "sample1"}
+ assert (
+ gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"]
+ )
+
+
+@pytest.mark.asyncio
+async def test_allocate_ids_routing_parameters_request_2_grpc_asyncio():
+ client = DatastoreAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.allocate_ids), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ datastore.AllocateIdsResponse()
+ )
+ await client.allocate_ids(request={"database_id": "sample1"})
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, kw = call.mock_calls[0]
+ request_msg = datastore.AllocateIdsRequest(**{"database_id": "sample1"})
+
+ assert args[0] == request_msg
+
+ expected_headers = {"database_id": "sample1"}
+ assert (
+ gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"]
+ )
+
+
+@pytest.mark.asyncio
+async def test_reserve_ids_routing_parameters_request_1_grpc_asyncio():
+ client = DatastoreAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.reserve_ids), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ datastore.ReserveIdsResponse()
+ )
+ await client.reserve_ids(request={"project_id": "sample1"})
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, kw = call.mock_calls[0]
+ request_msg = datastore.ReserveIdsRequest(**{"project_id": "sample1"})
+
+ assert args[0] == request_msg
+
+ expected_headers = {"project_id": "sample1"}
+ assert (
+ gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"]
+ )
+
+
+@pytest.mark.asyncio
+async def test_reserve_ids_routing_parameters_request_2_grpc_asyncio():
+ client = DatastoreAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.reserve_ids), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ datastore.ReserveIdsResponse()
+ )
+ await client.reserve_ids(request={"database_id": "sample1"})
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, kw = call.mock_calls[0]
+ request_msg = datastore.ReserveIdsRequest(**{"database_id": "sample1"})
+
+ assert args[0] == request_msg
+
+ expected_headers = {"database_id": "sample1"}
+ assert (
+ gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"]
+ )
+
+
+def test_transport_kind_rest():
+ transport = DatastoreClient.get_transport_class("rest")(
+ credentials=ga_credentials.AnonymousCredentials()
+ )
+ assert transport.kind == "rest"
+
+
+def test_lookup_rest_bad_request(request_type=datastore.LookupRequest):
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+ )
+ # send a request that will satisfy transcoding
+ request_init = {"project_id": "sample1"}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises(
+ core_exceptions.BadRequest
+ ):
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ json_return_value = ""
+ response_value.json = mock.Mock(return_value={})
+ response_value.status_code = 400
+ response_value.request = mock.Mock()
+ req.return_value = response_value
+ client.lookup(request)
+
+
+@pytest.mark.parametrize(
+ "request_type",
+ [
+ datastore.LookupRequest,
+ dict,
+ ],
+)
+def test_lookup_rest_call_success(request_type):
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {"project_id": "sample1"}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), "request") as req:
+ # Designate an appropriate value for the returned response.
+ return_value = datastore.LookupResponse(
+ transaction=b"transaction_blob",
+ )
+
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ response_value.status_code = 200
+
+ # Convert return value to protobuf type
+ return_value = datastore.LookupResponse.pb(return_value)
+ json_return_value = json_format.MessageToJson(return_value)
+ response_value.content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+ response = client.lookup(request)
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, datastore.LookupResponse)
+ assert response.transaction == b"transaction_blob"
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_lookup_rest_interceptors(null_interceptor):
+    """With or without a configured interceptor, pre_lookup/post_lookup each fire once per call."""
+    transport = transports.DatastoreRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.DatastoreRestInterceptor(),
+    )
+    client = DatastoreClient(transport=transport)
+
+    # Patch the HTTP session, the transcoder, and both interceptor hooks.
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.DatastoreRestInterceptor, "post_lookup"
+    ) as post, mock.patch.object(
+        transports.DatastoreRestInterceptor, "pre_lookup"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        # Short-circuit transcoding so no real URI routing is exercised.
+        pb_message = datastore.LookupRequest.pb(datastore.LookupRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        # Fake a 200 response whose body is a serialized LookupResponse.
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        return_value = datastore.LookupResponse.to_json(datastore.LookupResponse())
+        req.return_value.content = return_value
+
+        # The pre-hook rewrites (request, metadata); the post-hook rewrites the response.
+        request = datastore.LookupRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = datastore.LookupResponse()
+
+        client.lookup(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+def test_run_query_rest_bad_request(request_type=datastore.RunQueryRequest):
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+ )
+ # send a request that will satisfy transcoding
+ request_init = {"project_id": "sample1"}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises(
+ core_exceptions.BadRequest
+ ):
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ json_return_value = ""
+ response_value.json = mock.Mock(return_value={})
+ response_value.status_code = 400
+ response_value.request = mock.Mock()
+ req.return_value = response_value
+ client.run_query(request)
+
+
+@pytest.mark.parametrize(
+ "request_type",
+ [
+ datastore.RunQueryRequest,
+ dict,
+ ],
+)
+def test_run_query_rest_call_success(request_type):
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {"project_id": "sample1"}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), "request") as req:
+ # Designate an appropriate value for the returned response.
+ return_value = datastore.RunQueryResponse(
+ transaction=b"transaction_blob",
+ )
+
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ response_value.status_code = 200
+
+ # Convert return value to protobuf type
+ return_value = datastore.RunQueryResponse.pb(return_value)
+ json_return_value = json_format.MessageToJson(return_value)
+ response_value.content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+ response = client.run_query(request)
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, datastore.RunQueryResponse)
+ assert response.transaction == b"transaction_blob"
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_run_query_rest_interceptors(null_interceptor):
+    """With or without a configured interceptor, pre_run_query/post_run_query each fire once per call."""
+    transport = transports.DatastoreRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.DatastoreRestInterceptor(),
+    )
+    client = DatastoreClient(transport=transport)
+
+    # Patch the HTTP session, the transcoder, and both interceptor hooks.
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.DatastoreRestInterceptor, "post_run_query"
+    ) as post, mock.patch.object(
+        transports.DatastoreRestInterceptor, "pre_run_query"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        # Short-circuit transcoding so no real URI routing is exercised.
+        pb_message = datastore.RunQueryRequest.pb(datastore.RunQueryRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        # Fake a 200 response whose body is a serialized RunQueryResponse.
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        return_value = datastore.RunQueryResponse.to_json(datastore.RunQueryResponse())
+        req.return_value.content = return_value
+
+        # The pre-hook rewrites (request, metadata); the post-hook rewrites the response.
+        request = datastore.RunQueryRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = datastore.RunQueryResponse()
+
+        client.run_query(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+def test_run_aggregation_query_rest_bad_request(
+ request_type=datastore.RunAggregationQueryRequest,
+):
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+ )
+ # send a request that will satisfy transcoding
+ request_init = {"project_id": "sample1"}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises(
+ core_exceptions.BadRequest
+ ):
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ json_return_value = ""
+ response_value.json = mock.Mock(return_value={})
+ response_value.status_code = 400
+ response_value.request = mock.Mock()
+ req.return_value = response_value
+ client.run_aggregation_query(request)
+
+
+@pytest.mark.parametrize(
+ "request_type",
+ [
+ datastore.RunAggregationQueryRequest,
+ dict,
+ ],
+)
+def test_run_aggregation_query_rest_call_success(request_type):
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {"project_id": "sample1"}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), "request") as req:
+ # Designate an appropriate value for the returned response.
+ return_value = datastore.RunAggregationQueryResponse(
+ transaction=b"transaction_blob",
+ )
+
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ response_value.status_code = 200
+
+ # Convert return value to protobuf type
+ return_value = datastore.RunAggregationQueryResponse.pb(return_value)
+ json_return_value = json_format.MessageToJson(return_value)
+ response_value.content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+ response = client.run_aggregation_query(request)
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, datastore.RunAggregationQueryResponse)
+ assert response.transaction == b"transaction_blob"
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_run_aggregation_query_rest_interceptors(null_interceptor):
+    """With or without a configured interceptor, pre/post_run_aggregation_query each fire once per call."""
+    transport = transports.DatastoreRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.DatastoreRestInterceptor(),
+    )
+    client = DatastoreClient(transport=transport)
+
+    # Patch the HTTP session, the transcoder, and both interceptor hooks.
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.DatastoreRestInterceptor, "post_run_aggregation_query"
+    ) as post, mock.patch.object(
+        transports.DatastoreRestInterceptor, "pre_run_aggregation_query"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        # Short-circuit transcoding so no real URI routing is exercised.
+        pb_message = datastore.RunAggregationQueryRequest.pb(
+            datastore.RunAggregationQueryRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        # Fake a 200 response whose body is a serialized RunAggregationQueryResponse.
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        return_value = datastore.RunAggregationQueryResponse.to_json(
+            datastore.RunAggregationQueryResponse()
+        )
+        req.return_value.content = return_value
+
+        # The pre-hook rewrites (request, metadata); the post-hook rewrites the response.
+        request = datastore.RunAggregationQueryRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = datastore.RunAggregationQueryResponse()
+
+        client.run_aggregation_query(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+def test_begin_transaction_rest_bad_request(
+ request_type=datastore.BeginTransactionRequest,
+):
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+ )
+ # send a request that will satisfy transcoding
+ request_init = {"project_id": "sample1"}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises(
+ core_exceptions.BadRequest
+ ):
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ json_return_value = ""
+ response_value.json = mock.Mock(return_value={})
+ response_value.status_code = 400
+ response_value.request = mock.Mock()
+ req.return_value = response_value
+ client.begin_transaction(request)
+
+
+@pytest.mark.parametrize(
+ "request_type",
+ [
+ datastore.BeginTransactionRequest,
+ dict,
+ ],
+)
+def test_begin_transaction_rest_call_success(request_type):
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {"project_id": "sample1"}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), "request") as req:
+ # Designate an appropriate value for the returned response.
+ return_value = datastore.BeginTransactionResponse(
+ transaction=b"transaction_blob",
+ )
+
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ response_value.status_code = 200
+
+ # Convert return value to protobuf type
+ return_value = datastore.BeginTransactionResponse.pb(return_value)
+ json_return_value = json_format.MessageToJson(return_value)
+ response_value.content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+ response = client.begin_transaction(request)
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, datastore.BeginTransactionResponse)
+ assert response.transaction == b"transaction_blob"
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_begin_transaction_rest_interceptors(null_interceptor):
+    """With or without a configured interceptor, pre/post_begin_transaction each fire once per call."""
+    transport = transports.DatastoreRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.DatastoreRestInterceptor(),
+    )
+    client = DatastoreClient(transport=transport)
+
+    # Patch the HTTP session, the transcoder, and both interceptor hooks.
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.DatastoreRestInterceptor, "post_begin_transaction"
+    ) as post, mock.patch.object(
+        transports.DatastoreRestInterceptor, "pre_begin_transaction"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        # Short-circuit transcoding so no real URI routing is exercised.
+        pb_message = datastore.BeginTransactionRequest.pb(
+            datastore.BeginTransactionRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        # Fake a 200 response whose body is a serialized BeginTransactionResponse.
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        return_value = datastore.BeginTransactionResponse.to_json(
+            datastore.BeginTransactionResponse()
+        )
+        req.return_value.content = return_value
+
+        # The pre-hook rewrites (request, metadata); the post-hook rewrites the response.
+        request = datastore.BeginTransactionRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = datastore.BeginTransactionResponse()
+
+        client.begin_transaction(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+def test_commit_rest_bad_request(request_type=datastore.CommitRequest):
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+ )
+ # send a request that will satisfy transcoding
+ request_init = {"project_id": "sample1"}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises(
+ core_exceptions.BadRequest
+ ):
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ json_return_value = ""
+ response_value.json = mock.Mock(return_value={})
+ response_value.status_code = 400
+ response_value.request = mock.Mock()
+ req.return_value = response_value
+ client.commit(request)
+
+
+@pytest.mark.parametrize(
+ "request_type",
+ [
+ datastore.CommitRequest,
+ dict,
+ ],
+)
+def test_commit_rest_call_success(request_type):
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {"project_id": "sample1"}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), "request") as req:
+ # Designate an appropriate value for the returned response.
+ return_value = datastore.CommitResponse(
+ index_updates=1389,
+ )
+
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ response_value.status_code = 200
+
+ # Convert return value to protobuf type
+ return_value = datastore.CommitResponse.pb(return_value)
+ json_return_value = json_format.MessageToJson(return_value)
+ response_value.content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+ response = client.commit(request)
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, datastore.CommitResponse)
+ assert response.index_updates == 1389
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_commit_rest_interceptors(null_interceptor):
+    """With or without a configured interceptor, pre_commit/post_commit each fire once per call."""
+    transport = transports.DatastoreRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.DatastoreRestInterceptor(),
+    )
+    client = DatastoreClient(transport=transport)
+
+    # Patch the HTTP session, the transcoder, and both interceptor hooks.
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.DatastoreRestInterceptor, "post_commit"
+    ) as post, mock.patch.object(
+        transports.DatastoreRestInterceptor, "pre_commit"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        # Short-circuit transcoding so no real URI routing is exercised.
+        pb_message = datastore.CommitRequest.pb(datastore.CommitRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        # Fake a 200 response whose body is a serialized CommitResponse.
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        return_value = datastore.CommitResponse.to_json(datastore.CommitResponse())
+        req.return_value.content = return_value
+
+        # The pre-hook rewrites (request, metadata); the post-hook rewrites the response.
+        request = datastore.CommitRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = datastore.CommitResponse()
+
+        client.commit(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+def test_rollback_rest_bad_request(request_type=datastore.RollbackRequest):
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+ )
+ # send a request that will satisfy transcoding
+ request_init = {"project_id": "sample1"}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises(
+ core_exceptions.BadRequest
+ ):
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ json_return_value = ""
+ response_value.json = mock.Mock(return_value={})
+ response_value.status_code = 400
+ response_value.request = mock.Mock()
+ req.return_value = response_value
+ client.rollback(request)
+
+
+@pytest.mark.parametrize(
+ "request_type",
+ [
+ datastore.RollbackRequest,
+ dict,
+ ],
+)
+def test_rollback_rest_call_success(request_type):
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {"project_id": "sample1"}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), "request") as req:
+ # Designate an appropriate value for the returned response.
+ return_value = datastore.RollbackResponse()
+
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ response_value.status_code = 200
+
+ # Convert return value to protobuf type
+ return_value = datastore.RollbackResponse.pb(return_value)
+ json_return_value = json_format.MessageToJson(return_value)
+ response_value.content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+ response = client.rollback(request)
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, datastore.RollbackResponse)
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_rollback_rest_interceptors(null_interceptor):
+    """With or without a configured interceptor, pre_rollback/post_rollback each fire once per call."""
+    transport = transports.DatastoreRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.DatastoreRestInterceptor(),
+    )
+    client = DatastoreClient(transport=transport)
+
+    # Patch the HTTP session, the transcoder, and both interceptor hooks.
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.DatastoreRestInterceptor, "post_rollback"
+    ) as post, mock.patch.object(
+        transports.DatastoreRestInterceptor, "pre_rollback"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        # Short-circuit transcoding so no real URI routing is exercised.
+        pb_message = datastore.RollbackRequest.pb(datastore.RollbackRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        # Fake a 200 response whose body is a serialized RollbackResponse.
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        return_value = datastore.RollbackResponse.to_json(datastore.RollbackResponse())
+        req.return_value.content = return_value
+
+        # The pre-hook rewrites (request, metadata); the post-hook rewrites the response.
+        request = datastore.RollbackRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = datastore.RollbackResponse()
+
+        client.rollback(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+def test_allocate_ids_rest_bad_request(request_type=datastore.AllocateIdsRequest):
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+ )
+ # send a request that will satisfy transcoding
+ request_init = {"project_id": "sample1"}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises(
+ core_exceptions.BadRequest
+ ):
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ json_return_value = ""
+ response_value.json = mock.Mock(return_value={})
+ response_value.status_code = 400
+ response_value.request = mock.Mock()
+ req.return_value = response_value
+ client.allocate_ids(request)
+
+
+@pytest.mark.parametrize(
+ "request_type",
+ [
+ datastore.AllocateIdsRequest,
+ dict,
+ ],
+)
+def test_allocate_ids_rest_call_success(request_type):
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {"project_id": "sample1"}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), "request") as req:
+ # Designate an appropriate value for the returned response.
+ return_value = datastore.AllocateIdsResponse()
+
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ response_value.status_code = 200
+
+ # Convert return value to protobuf type
+ return_value = datastore.AllocateIdsResponse.pb(return_value)
+ json_return_value = json_format.MessageToJson(return_value)
+ response_value.content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+ response = client.allocate_ids(request)
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, datastore.AllocateIdsResponse)
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_allocate_ids_rest_interceptors(null_interceptor):
+    """With or without a configured interceptor, pre/post_allocate_ids each fire once per call."""
+    transport = transports.DatastoreRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.DatastoreRestInterceptor(),
+    )
+    client = DatastoreClient(transport=transport)
+
+    # Patch the HTTP session, the transcoder, and both interceptor hooks.
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.DatastoreRestInterceptor, "post_allocate_ids"
+    ) as post, mock.patch.object(
+        transports.DatastoreRestInterceptor, "pre_allocate_ids"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        # Short-circuit transcoding so no real URI routing is exercised.
+        pb_message = datastore.AllocateIdsRequest.pb(datastore.AllocateIdsRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        # Fake a 200 response whose body is a serialized AllocateIdsResponse.
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        return_value = datastore.AllocateIdsResponse.to_json(
+            datastore.AllocateIdsResponse()
+        )
+        req.return_value.content = return_value
+
+        # The pre-hook rewrites (request, metadata); the post-hook rewrites the response.
+        request = datastore.AllocateIdsRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = datastore.AllocateIdsResponse()
+
+        client.allocate_ids(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+def test_reserve_ids_rest_bad_request(request_type=datastore.ReserveIdsRequest):
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+ )
+ # send a request that will satisfy transcoding
+ request_init = {"project_id": "sample1"}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises(
+ core_exceptions.BadRequest
+ ):
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ json_return_value = ""
+ response_value.json = mock.Mock(return_value={})
+ response_value.status_code = 400
+ response_value.request = mock.Mock()
+ req.return_value = response_value
+ client.reserve_ids(request)
+
+
+@pytest.mark.parametrize(
+ "request_type",
+ [
+ datastore.ReserveIdsRequest,
+ dict,
+ ],
+)
+def test_reserve_ids_rest_call_success(request_type):
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {"project_id": "sample1"}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), "request") as req:
+ # Designate an appropriate value for the returned response.
+ return_value = datastore.ReserveIdsResponse()
+
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ response_value.status_code = 200
+
+ # Convert return value to protobuf type
+ return_value = datastore.ReserveIdsResponse.pb(return_value)
+ json_return_value = json_format.MessageToJson(return_value)
+ response_value.content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+ response = client.reserve_ids(request)
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, datastore.ReserveIdsResponse)
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_reserve_ids_rest_interceptors(null_interceptor):
+    """With or without a configured interceptor, pre/post_reserve_ids each fire once per call."""
+    transport = transports.DatastoreRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.DatastoreRestInterceptor(),
+    )
+    client = DatastoreClient(transport=transport)
+
+    # Patch the HTTP session, the transcoder, and both interceptor hooks.
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.DatastoreRestInterceptor, "post_reserve_ids"
+    ) as post, mock.patch.object(
+        transports.DatastoreRestInterceptor, "pre_reserve_ids"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        # Short-circuit transcoding so no real URI routing is exercised.
+        pb_message = datastore.ReserveIdsRequest.pb(datastore.ReserveIdsRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        # Fake a 200 response whose body is a serialized ReserveIdsResponse.
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        return_value = datastore.ReserveIdsResponse.to_json(
+            datastore.ReserveIdsResponse()
+        )
+        req.return_value.content = return_value
+
+        # The pre-hook rewrites (request, metadata); the post-hook rewrites the response.
+        request = datastore.ReserveIdsRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = datastore.ReserveIdsResponse()
+
+        client.reserve_ids(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+def test_cancel_operation_rest_bad_request(
+ request_type=operations_pb2.CancelOperationRequest,
+):
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+ request = request_type()
+ request = json_format.ParseDict(
+ {"name": "projects/sample1/operations/sample2"}, request
+ )
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises(
+ core_exceptions.BadRequest
+ ):
+ # Wrap the value into a proper Response obj
+ response_value = Response()
+ json_return_value = ""
+ response_value.json = mock.Mock(return_value={})
+ response_value.status_code = 400
+ response_value.request = Request()
+ req.return_value = response_value
+ client.cancel_operation(request)
+
+
+@pytest.mark.parametrize(
+ "request_type",
+ [
+ operations_pb2.CancelOperationRequest,
+ dict,
+ ],
+)
+def test_cancel_operation_rest(request_type):
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ request_init = {"name": "projects/sample1/operations/sample2"}
+ request = request_type(**request_init)
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(Session, "request") as req:
+ # Designate an appropriate value for the returned response.
+ return_value = None
+
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ response_value.status_code = 200
+ json_return_value = "{}"
+ response_value.content = json_return_value.encode("UTF-8")
+
+ req.return_value = response_value
+
+ response = client.cancel_operation(request)
+
+ # Establish that the response is the type that we expect.
+ assert response is None
+
+
+def test_delete_operation_rest_bad_request(
+ request_type=operations_pb2.DeleteOperationRequest,
+):
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+ request = request_type()
+ request = json_format.ParseDict(
+ {"name": "projects/sample1/operations/sample2"}, request
+ )
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises(
+ core_exceptions.BadRequest
+ ):
+ # Wrap the value into a proper Response obj
+ response_value = Response()
+ json_return_value = ""
+ response_value.json = mock.Mock(return_value={})
+ response_value.status_code = 400
+ response_value.request = Request()
+ req.return_value = response_value
+ client.delete_operation(request)
+
+
+@pytest.mark.parametrize(
+ "request_type",
+ [
+ operations_pb2.DeleteOperationRequest,
+ dict,
+ ],
+)
+def test_delete_operation_rest(request_type):
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ request_init = {"name": "projects/sample1/operations/sample2"}
+ request = request_type(**request_init)
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(Session, "request") as req:
+ # Designate an appropriate value for the returned response.
+ return_value = None
+
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ response_value.status_code = 200
+ json_return_value = "{}"
+ response_value.content = json_return_value.encode("UTF-8")
+
+ req.return_value = response_value
+
+ response = client.delete_operation(request)
+
+ # Establish that the response is the type that we expect.
+ assert response is None
+
+
+def test_get_operation_rest_bad_request(
+ request_type=operations_pb2.GetOperationRequest,
+):
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+ request = request_type()
+ request = json_format.ParseDict(
+ {"name": "projects/sample1/operations/sample2"}, request
+ )
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises(
+ core_exceptions.BadRequest
+ ):
+ # Wrap the value into a proper Response obj
+ response_value = Response()
+ json_return_value = ""
+ response_value.json = mock.Mock(return_value={})
+ response_value.status_code = 400
+ response_value.request = Request()
+ req.return_value = response_value
+ client.get_operation(request)
+
+
+@pytest.mark.parametrize(
+ "request_type",
+ [
+ operations_pb2.GetOperationRequest,
+ dict,
+ ],
+)
+def test_get_operation_rest(request_type):
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ request_init = {"name": "projects/sample1/operations/sample2"}
+ request = request_type(**request_init)
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(Session, "request") as req:
+ # Designate an appropriate value for the returned response.
+ return_value = operations_pb2.Operation()
+
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ response_value.status_code = 200
+ json_return_value = json_format.MessageToJson(return_value)
+ response_value.content = json_return_value.encode("UTF-8")
+
+ req.return_value = response_value
+
+ response = client.get_operation(request)
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, operations_pb2.Operation)
+
+
+def test_list_operations_rest_bad_request(
+ request_type=operations_pb2.ListOperationsRequest,
+):
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+ request = request_type()
+ request = json_format.ParseDict({"name": "projects/sample1"}, request)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises(
+ core_exceptions.BadRequest
+ ):
+ # Wrap the value into a proper Response obj
+ response_value = Response()
+ json_return_value = ""
+ response_value.json = mock.Mock(return_value={})
+ response_value.status_code = 400
+ response_value.request = Request()
+ req.return_value = response_value
+ client.list_operations(request)
+
+
+@pytest.mark.parametrize(
+ "request_type",
+ [
+ operations_pb2.ListOperationsRequest,
+ dict,
+ ],
+)
+def test_list_operations_rest(request_type):
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ request_init = {"name": "projects/sample1"}
+ request = request_type(**request_init)
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(Session, "request") as req:
+ # Designate an appropriate value for the returned response.
+ return_value = operations_pb2.ListOperationsResponse()
+
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ response_value.status_code = 200
+ json_return_value = json_format.MessageToJson(return_value)
+ response_value.content = json_return_value.encode("UTF-8")
+
+ req.return_value = response_value
+
+ response = client.list_operations(request)
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, operations_pb2.ListOperationsResponse)
+
+
+def test_initialize_client_w_rest():
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+ )
+ assert client is not None
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_lookup_empty_call_rest():
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.lookup), "__call__") as call:
+ client.lookup(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = datastore.LookupRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_run_query_empty_call_rest():
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.run_query), "__call__") as call:
+ client.run_query(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = datastore.RunQueryRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_run_aggregation_query_empty_call_rest():
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(
+ type(client.transport.run_aggregation_query), "__call__"
+ ) as call:
+ client.run_aggregation_query(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = datastore.RunAggregationQueryRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_begin_transaction_empty_call_rest():
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(
+ type(client.transport.begin_transaction), "__call__"
+ ) as call:
+ client.begin_transaction(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = datastore.BeginTransactionRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_commit_empty_call_rest():
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.commit), "__call__") as call:
+ client.commit(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = datastore.CommitRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_rollback_empty_call_rest():
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.rollback), "__call__") as call:
+ client.rollback(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = datastore.RollbackRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_allocate_ids_empty_call_rest():
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.allocate_ids), "__call__") as call:
+ client.allocate_ids(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = datastore.AllocateIdsRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_reserve_ids_empty_call_rest():
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.reserve_ids), "__call__") as call:
+ client.reserve_ids(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = datastore.ReserveIdsRequest()
+
+ assert args[0] == request_msg
+
+
+def test_lookup_routing_parameters_request_1_rest():
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.lookup), "__call__") as call:
+ client.lookup(request={"project_id": "sample1"})
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, kw = call.mock_calls[0]
+ request_msg = datastore.LookupRequest(**{"project_id": "sample1"})
+
+ assert args[0] == request_msg
+
+ expected_headers = {"project_id": "sample1"}
+ assert (
+ gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"]
+ )
+
+
+def test_lookup_routing_parameters_request_2_rest():
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.lookup), "__call__") as call:
+ client.lookup(request={"database_id": "sample1"})
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, kw = call.mock_calls[0]
+ request_msg = datastore.LookupRequest(**{"database_id": "sample1"})
+
+ assert args[0] == request_msg
+
+ expected_headers = {"database_id": "sample1"}
+ assert (
+ gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"]
+ )
+
+
+def test_run_query_routing_parameters_request_1_rest():
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.run_query), "__call__") as call:
+ client.run_query(request={"project_id": "sample1"})
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, kw = call.mock_calls[0]
+ request_msg = datastore.RunQueryRequest(**{"project_id": "sample1"})
+
+ assert args[0] == request_msg
+
+ expected_headers = {"project_id": "sample1"}
+ assert (
+ gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"]
+ )
+
+
+def test_run_query_routing_parameters_request_2_rest():
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.run_query), "__call__") as call:
+ client.run_query(request={"database_id": "sample1"})
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, kw = call.mock_calls[0]
+ request_msg = datastore.RunQueryRequest(**{"database_id": "sample1"})
+
+ assert args[0] == request_msg
+
+ expected_headers = {"database_id": "sample1"}
+ assert (
+ gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"]
+ )
+
+
+def test_run_aggregation_query_routing_parameters_request_1_rest():
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(
+ type(client.transport.run_aggregation_query), "__call__"
+ ) as call:
+ client.run_aggregation_query(request={"project_id": "sample1"})
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, kw = call.mock_calls[0]
+ request_msg = datastore.RunAggregationQueryRequest(**{"project_id": "sample1"})
+
+ assert args[0] == request_msg
+
+ expected_headers = {"project_id": "sample1"}
+ assert (
+ gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"]
+ )
+
+
+def test_run_aggregation_query_routing_parameters_request_2_rest():
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(
+ type(client.transport.run_aggregation_query), "__call__"
+ ) as call:
+ client.run_aggregation_query(request={"database_id": "sample1"})
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, kw = call.mock_calls[0]
+ request_msg = datastore.RunAggregationQueryRequest(**{"database_id": "sample1"})
+
+ assert args[0] == request_msg
+
+ expected_headers = {"database_id": "sample1"}
+ assert (
+ gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"]
+ )
+
+
+def test_begin_transaction_routing_parameters_request_1_rest():
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(
+ type(client.transport.begin_transaction), "__call__"
+ ) as call:
+ client.begin_transaction(request={"project_id": "sample1"})
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, kw = call.mock_calls[0]
+ request_msg = datastore.BeginTransactionRequest(**{"project_id": "sample1"})
+
+ assert args[0] == request_msg
+
+ expected_headers = {"project_id": "sample1"}
+ assert (
+ gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"]
+ )
+
+
+def test_begin_transaction_routing_parameters_request_2_rest():
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(
+ type(client.transport.begin_transaction), "__call__"
+ ) as call:
+ client.begin_transaction(request={"database_id": "sample1"})
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, kw = call.mock_calls[0]
+ request_msg = datastore.BeginTransactionRequest(**{"database_id": "sample1"})
+
+ assert args[0] == request_msg
+
+ expected_headers = {"database_id": "sample1"}
+ assert (
+ gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"]
+ )
+
+
+def test_commit_routing_parameters_request_1_rest():
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.commit), "__call__") as call:
+ client.commit(request={"project_id": "sample1"})
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, kw = call.mock_calls[0]
+ request_msg = datastore.CommitRequest(**{"project_id": "sample1"})
+
+ assert args[0] == request_msg
+
+ expected_headers = {"project_id": "sample1"}
+ assert (
+ gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"]
+ )
+
+
+def test_commit_routing_parameters_request_2_rest():
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.commit), "__call__") as call:
+ client.commit(request={"database_id": "sample1"})
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, kw = call.mock_calls[0]
+ request_msg = datastore.CommitRequest(**{"database_id": "sample1"})
+
+ assert args[0] == request_msg
+
+ expected_headers = {"database_id": "sample1"}
+ assert (
+ gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"]
+ )
+
+
+def test_rollback_routing_parameters_request_1_rest():
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.rollback), "__call__") as call:
+ client.rollback(request={"project_id": "sample1"})
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, kw = call.mock_calls[0]
+ request_msg = datastore.RollbackRequest(**{"project_id": "sample1"})
+
+ assert args[0] == request_msg
+
+ expected_headers = {"project_id": "sample1"}
+ assert (
+ gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"]
+ )
+
+
+def test_rollback_routing_parameters_request_2_rest():
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.rollback), "__call__") as call:
+ client.rollback(request={"database_id": "sample1"})
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, kw = call.mock_calls[0]
+ request_msg = datastore.RollbackRequest(**{"database_id": "sample1"})
+
+ assert args[0] == request_msg
+
+ expected_headers = {"database_id": "sample1"}
+ assert (
+ gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"]
+ )
+
+
+def test_allocate_ids_routing_parameters_request_1_rest():
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.allocate_ids), "__call__") as call:
+ client.allocate_ids(request={"project_id": "sample1"})
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, kw = call.mock_calls[0]
+ request_msg = datastore.AllocateIdsRequest(**{"project_id": "sample1"})
+
+ assert args[0] == request_msg
+
+ expected_headers = {"project_id": "sample1"}
+ assert (
+ gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"]
+ )
+
+
+def test_allocate_ids_routing_parameters_request_2_rest():
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.allocate_ids), "__call__") as call:
+ client.allocate_ids(request={"database_id": "sample1"})
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, kw = call.mock_calls[0]
+ request_msg = datastore.AllocateIdsRequest(**{"database_id": "sample1"})
+
+ assert args[0] == request_msg
+
+ expected_headers = {"database_id": "sample1"}
+ assert (
+ gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"]
+ )
+
+
+def test_reserve_ids_routing_parameters_request_1_rest():
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.reserve_ids), "__call__") as call:
+ client.reserve_ids(request={"project_id": "sample1"})
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, kw = call.mock_calls[0]
+ request_msg = datastore.ReserveIdsRequest(**{"project_id": "sample1"})
+
+ assert args[0] == request_msg
+
+ expected_headers = {"project_id": "sample1"}
+ assert (
+ gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"]
+ )
+
+
+def test_reserve_ids_routing_parameters_request_2_rest():
+ client = DatastoreClient(
credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
)
- channel = transport.grpc_channel
- assert channel
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.reserve_ids), "__call__") as call:
+ client.reserve_ids(request={"database_id": "sample1"})
-@pytest.mark.parametrize(
- "transport_class",
- [
- transports.DatastoreGrpcTransport,
- transports.DatastoreGrpcAsyncIOTransport,
- transports.DatastoreRestTransport,
- ],
-)
-def test_transport_adc(transport_class):
- # Test default credentials are used if not provided.
- with mock.patch.object(google.auth, "default") as adc:
- adc.return_value = (ga_credentials.AnonymousCredentials(), None)
- transport_class()
- adc.assert_called_once()
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, kw = call.mock_calls[0]
+ request_msg = datastore.ReserveIdsRequest(**{"database_id": "sample1"})
+ assert args[0] == request_msg
-@pytest.mark.parametrize(
- "transport_name",
- [
- "grpc",
- "rest",
- ],
-)
-def test_transport_kind(transport_name):
- transport = DatastoreClient.get_transport_class(transport_name)(
- credentials=ga_credentials.AnonymousCredentials(),
- )
- assert transport.kind == transport_name
+ expected_headers = {"database_id": "sample1"}
+ assert (
+ gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"]
+ )
def test_transport_grpc_default():
@@ -6832,377 +8171,133 @@ def test_datastore_transport_channel_mtls_with_adc(transport_class):
("grpc.max_receive_message_length", -1),
],
)
- assert transport.grpc_channel == mock_grpc_channel
-
-
-def test_common_billing_account_path():
- billing_account = "squid"
- expected = "billingAccounts/{billing_account}".format(
- billing_account=billing_account,
- )
- actual = DatastoreClient.common_billing_account_path(billing_account)
- assert expected == actual
-
-
-def test_parse_common_billing_account_path():
- expected = {
- "billing_account": "clam",
- }
- path = DatastoreClient.common_billing_account_path(**expected)
-
- # Check that the path construction is reversible.
- actual = DatastoreClient.parse_common_billing_account_path(path)
- assert expected == actual
-
-
-def test_common_folder_path():
- folder = "whelk"
- expected = "folders/{folder}".format(
- folder=folder,
- )
- actual = DatastoreClient.common_folder_path(folder)
- assert expected == actual
-
-
-def test_parse_common_folder_path():
- expected = {
- "folder": "octopus",
- }
- path = DatastoreClient.common_folder_path(**expected)
-
- # Check that the path construction is reversible.
- actual = DatastoreClient.parse_common_folder_path(path)
- assert expected == actual
-
-
-def test_common_organization_path():
- organization = "oyster"
- expected = "organizations/{organization}".format(
- organization=organization,
- )
- actual = DatastoreClient.common_organization_path(organization)
- assert expected == actual
-
-
-def test_parse_common_organization_path():
- expected = {
- "organization": "nudibranch",
- }
- path = DatastoreClient.common_organization_path(**expected)
-
- # Check that the path construction is reversible.
- actual = DatastoreClient.parse_common_organization_path(path)
- assert expected == actual
-
-
-def test_common_project_path():
- project = "cuttlefish"
- expected = "projects/{project}".format(
- project=project,
- )
- actual = DatastoreClient.common_project_path(project)
- assert expected == actual
-
-
-def test_parse_common_project_path():
- expected = {
- "project": "mussel",
- }
- path = DatastoreClient.common_project_path(**expected)
-
- # Check that the path construction is reversible.
- actual = DatastoreClient.parse_common_project_path(path)
- assert expected == actual
-
-
-def test_common_location_path():
- project = "winkle"
- location = "nautilus"
- expected = "projects/{project}/locations/{location}".format(
- project=project,
- location=location,
- )
- actual = DatastoreClient.common_location_path(project, location)
- assert expected == actual
-
-
-def test_parse_common_location_path():
- expected = {
- "project": "scallop",
- "location": "abalone",
- }
- path = DatastoreClient.common_location_path(**expected)
-
- # Check that the path construction is reversible.
- actual = DatastoreClient.parse_common_location_path(path)
- assert expected == actual
-
-
-def test_client_with_default_client_info():
- client_info = gapic_v1.client_info.ClientInfo()
-
- with mock.patch.object(
- transports.DatastoreTransport, "_prep_wrapped_messages"
- ) as prep:
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(),
- client_info=client_info,
- )
- prep.assert_called_once_with(client_info)
-
- with mock.patch.object(
- transports.DatastoreTransport, "_prep_wrapped_messages"
- ) as prep:
- transport_class = DatastoreClient.get_transport_class()
- transport = transport_class(
- credentials=ga_credentials.AnonymousCredentials(),
- client_info=client_info,
- )
- prep.assert_called_once_with(client_info)
-
-
-@pytest.mark.asyncio
-async def test_transport_close_async():
- client = DatastoreAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc_asyncio",
- )
- with mock.patch.object(
- type(getattr(client.transport, "grpc_channel")), "close"
- ) as close:
- async with client:
- close.assert_not_called()
- close.assert_called_once()
-
-
-def test_cancel_operation_rest_bad_request(
- transport: str = "rest", request_type=operations_pb2.CancelOperationRequest
-):
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- request = request_type()
- request = json_format.ParseDict(
- {"name": "projects/sample1/operations/sample2"}, request
- )
-
- # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
- ):
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 400
- response_value.request = Request()
- req.return_value = response_value
- client.cancel_operation(request)
-
-
-@pytest.mark.parametrize(
- "request_type",
- [
- operations_pb2.CancelOperationRequest,
- dict,
- ],
-)
-def test_cancel_operation_rest(request_type):
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
- request_init = {"name": "projects/sample1/operations/sample2"}
- request = request_type(**request_init)
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), "request") as req:
- # Designate an appropriate value for the returned response.
- return_value = None
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- json_return_value = "{}"
-
- response_value._content = json_return_value.encode("UTF-8")
- req.return_value = response_value
-
- response = client.cancel_operation(request)
-
- # Establish that the response is the type that we expect.
- assert response is None
-
-
-def test_delete_operation_rest_bad_request(
- transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest
-):
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- request = request_type()
- request = json_format.ParseDict(
- {"name": "projects/sample1/operations/sample2"}, request
- )
-
- # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
- ):
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 400
- response_value.request = Request()
- req.return_value = response_value
- client.delete_operation(request)
+ assert transport.grpc_channel == mock_grpc_channel
-@pytest.mark.parametrize(
- "request_type",
- [
- operations_pb2.DeleteOperationRequest,
- dict,
- ],
-)
-def test_delete_operation_rest(request_type):
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
+def test_common_billing_account_path():
+ billing_account = "squid"
+ expected = "billingAccounts/{billing_account}".format(
+ billing_account=billing_account,
)
- request_init = {"name": "projects/sample1/operations/sample2"}
- request = request_type(**request_init)
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), "request") as req:
- # Designate an appropriate value for the returned response.
- return_value = None
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- json_return_value = "{}"
+ actual = DatastoreClient.common_billing_account_path(billing_account)
+ assert expected == actual
- response_value._content = json_return_value.encode("UTF-8")
- req.return_value = response_value
- response = client.delete_operation(request)
+def test_parse_common_billing_account_path():
+ expected = {
+ "billing_account": "clam",
+ }
+ path = DatastoreClient.common_billing_account_path(**expected)
- # Establish that the response is the type that we expect.
- assert response is None
+ # Check that the path construction is reversible.
+ actual = DatastoreClient.parse_common_billing_account_path(path)
+ assert expected == actual
-def test_get_operation_rest_bad_request(
- transport: str = "rest", request_type=operations_pb2.GetOperationRequest
-):
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
+def test_common_folder_path():
+ folder = "whelk"
+ expected = "folders/{folder}".format(
+ folder=folder,
)
+ actual = DatastoreClient.common_folder_path(folder)
+ assert expected == actual
- request = request_type()
- request = json_format.ParseDict(
- {"name": "projects/sample1/operations/sample2"}, request
- )
- # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
- ):
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 400
- response_value.request = Request()
- req.return_value = response_value
- client.get_operation(request)
+def test_parse_common_folder_path():
+ expected = {
+ "folder": "octopus",
+ }
+ path = DatastoreClient.common_folder_path(**expected)
+ # Check that the path construction is reversible.
+ actual = DatastoreClient.parse_common_folder_path(path)
+ assert expected == actual
-@pytest.mark.parametrize(
- "request_type",
- [
- operations_pb2.GetOperationRequest,
- dict,
- ],
-)
-def test_get_operation_rest(request_type):
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
- request_init = {"name": "projects/sample1/operations/sample2"}
- request = request_type(**request_init)
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), "request") as req:
- # Designate an appropriate value for the returned response.
- return_value = operations_pb2.Operation()
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- json_return_value = json_format.MessageToJson(return_value)
+def test_common_organization_path():
+ organization = "oyster"
+ expected = "organizations/{organization}".format(
+ organization=organization,
+ )
+ actual = DatastoreClient.common_organization_path(organization)
+ assert expected == actual
- response_value._content = json_return_value.encode("UTF-8")
- req.return_value = response_value
- response = client.get_operation(request)
+def test_parse_common_organization_path():
+ expected = {
+ "organization": "nudibranch",
+ }
+ path = DatastoreClient.common_organization_path(**expected)
- # Establish that the response is the type that we expect.
- assert isinstance(response, operations_pb2.Operation)
+ # Check that the path construction is reversible.
+ actual = DatastoreClient.parse_common_organization_path(path)
+ assert expected == actual
-def test_list_operations_rest_bad_request(
- transport: str = "rest", request_type=operations_pb2.ListOperationsRequest
-):
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
+def test_common_project_path():
+ project = "cuttlefish"
+ expected = "projects/{project}".format(
+ project=project,
)
+ actual = DatastoreClient.common_project_path(project)
+ assert expected == actual
- request = request_type()
- request = json_format.ParseDict({"name": "projects/sample1"}, request)
- # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
- ):
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 400
- response_value.request = Request()
- req.return_value = response_value
- client.list_operations(request)
+def test_parse_common_project_path():
+ expected = {
+ "project": "mussel",
+ }
+ path = DatastoreClient.common_project_path(**expected)
+ # Check that the path construction is reversible.
+ actual = DatastoreClient.parse_common_project_path(path)
+ assert expected == actual
-@pytest.mark.parametrize(
- "request_type",
- [
- operations_pb2.ListOperationsRequest,
- dict,
- ],
-)
-def test_list_operations_rest(request_type):
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
+
+def test_common_location_path():
+ project = "winkle"
+ location = "nautilus"
+ expected = "projects/{project}/locations/{location}".format(
+ project=project,
+ location=location,
)
- request_init = {"name": "projects/sample1"}
- request = request_type(**request_init)
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), "request") as req:
- # Designate an appropriate value for the returned response.
- return_value = operations_pb2.ListOperationsResponse()
+ actual = DatastoreClient.common_location_path(project, location)
+ assert expected == actual
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- json_return_value = json_format.MessageToJson(return_value)
- response_value._content = json_return_value.encode("UTF-8")
- req.return_value = response_value
+def test_parse_common_location_path():
+ expected = {
+ "project": "scallop",
+ "location": "abalone",
+ }
+ path = DatastoreClient.common_location_path(**expected)
- response = client.list_operations(request)
+ # Check that the path construction is reversible.
+ actual = DatastoreClient.parse_common_location_path(path)
+ assert expected == actual
- # Establish that the response is the type that we expect.
- assert isinstance(response, operations_pb2.ListOperationsResponse)
+
+def test_client_with_default_client_info():
+ client_info = gapic_v1.client_info.ClientInfo()
+
+ with mock.patch.object(
+ transports.DatastoreTransport, "_prep_wrapped_messages"
+ ) as prep:
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ client_info=client_info,
+ )
+ prep.assert_called_once_with(client_info)
+
+ with mock.patch.object(
+ transports.DatastoreTransport, "_prep_wrapped_messages"
+ ) as prep:
+ transport_class = DatastoreClient.get_transport_class()
+ transport = transport_class(
+ credentials=ga_credentials.AnonymousCredentials(),
+ client_info=client_info,
+ )
+ prep.assert_called_once_with(client_info)
def test_delete_operation(transport: str = "grpc"):
@@ -7232,7 +8327,7 @@ def test_delete_operation(transport: str = "grpc"):
@pytest.mark.asyncio
async def test_delete_operation_async(transport: str = "grpc_asyncio"):
client = DatastoreAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -7285,7 +8380,7 @@ def test_delete_operation_field_headers():
@pytest.mark.asyncio
async def test_delete_operation_field_headers_async():
client = DatastoreAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
@@ -7330,7 +8425,7 @@ def test_delete_operation_from_dict():
@pytest.mark.asyncio
async def test_delete_operation_from_dict_async():
client = DatastoreAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.delete_operation), "__call__") as call:
@@ -7371,7 +8466,7 @@ def test_cancel_operation(transport: str = "grpc"):
@pytest.mark.asyncio
async def test_cancel_operation_async(transport: str = "grpc_asyncio"):
client = DatastoreAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -7424,7 +8519,7 @@ def test_cancel_operation_field_headers():
@pytest.mark.asyncio
async def test_cancel_operation_field_headers_async():
client = DatastoreAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
@@ -7469,7 +8564,7 @@ def test_cancel_operation_from_dict():
@pytest.mark.asyncio
async def test_cancel_operation_from_dict_async():
client = DatastoreAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call:
@@ -7510,7 +8605,7 @@ def test_get_operation(transport: str = "grpc"):
@pytest.mark.asyncio
async def test_get_operation_async(transport: str = "grpc_asyncio"):
client = DatastoreAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -7565,7 +8660,7 @@ def test_get_operation_field_headers():
@pytest.mark.asyncio
async def test_get_operation_field_headers_async():
client = DatastoreAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
@@ -7612,7 +8707,7 @@ def test_get_operation_from_dict():
@pytest.mark.asyncio
async def test_get_operation_from_dict_async():
client = DatastoreAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
@@ -7655,7 +8750,7 @@ def test_list_operations(transport: str = "grpc"):
@pytest.mark.asyncio
async def test_list_operations_async(transport: str = "grpc_asyncio"):
client = DatastoreAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -7710,7 +8805,7 @@ def test_list_operations_field_headers():
@pytest.mark.asyncio
async def test_list_operations_field_headers_async():
client = DatastoreAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
@@ -7757,7 +8852,7 @@ def test_list_operations_from_dict():
@pytest.mark.asyncio
async def test_list_operations_from_dict_async():
client = DatastoreAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
@@ -7773,22 +8868,41 @@ async def test_list_operations_from_dict_async():
call.assert_called()
-def test_transport_close():
- transports = {
- "rest": "_session",
- "grpc": "_grpc_channel",
- }
+def test_transport_close_grpc():
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="grpc"
+ )
+ with mock.patch.object(
+ type(getattr(client.transport, "_grpc_channel")), "close"
+ ) as close:
+ with client:
+ close.assert_not_called()
+ close.assert_called_once()
- for transport, close_name in transports.items():
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(), transport=transport
- )
- with mock.patch.object(
- type(getattr(client.transport, close_name)), "close"
- ) as close:
- with client:
- close.assert_not_called()
- close.assert_called_once()
+
+@pytest.mark.asyncio
+async def test_transport_close_grpc_asyncio():
+ client = DatastoreAsyncClient(
+ credentials=async_anonymous_credentials(), transport="grpc_asyncio"
+ )
+ with mock.patch.object(
+ type(getattr(client.transport, "_grpc_channel")), "close"
+ ) as close:
+ async with client:
+ close.assert_not_called()
+ close.assert_called_once()
+
+
+def test_transport_close_rest():
+ client = DatastoreClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+ )
+ with mock.patch.object(
+ type(getattr(client.transport, "_session")), "close"
+ ) as close:
+ with client:
+ close.assert_not_called()
+ close.assert_called_once()
def test_client_ctx():
diff --git a/tests/unit/test_helpers.py b/tests/unit/test_helpers.py
index 38702dba..a6f63a80 100644
--- a/tests/unit/test_helpers.py
+++ b/tests/unit/test_helpers.py
@@ -361,19 +361,21 @@ def test_entity_to_protobuf_w_variable_meanings():
entity = Entity()
name = "quux"
entity[name] = values = [1, 20, 300]
- meaning = 9
- entity._meanings[name] = ([None, meaning, None], values)
+ root_meaning = 31
+ sub_meaning = 9
+ entity._meanings[name] = ((root_meaning, [None, sub_meaning, None]), values)
entity_pb = entity_to_protobuf(entity)
# Construct the expected protobuf.
expected_pb = entity_pb2.Entity()
value_pb = _new_value_pb(expected_pb, name)
+ value_pb.meaning = root_meaning
value0 = value_pb.array_value.values.add()
value0.integer_value = values[0]
# The only array entry with a meaning is the middle one.
value1 = value_pb.array_value.values.add()
value1.integer_value = values[1]
- value1.meaning = meaning
+ value1.meaning = sub_meaning
value2 = value_pb.array_value.values.add()
value2.integer_value = values[2]
@@ -1179,7 +1181,46 @@ def test__get_meaning_w_array_value():
sub_value_pb2.string_value = "bye"
result = _get_meaning(value_pb, is_list=True)
- assert meaning == result
+ # should preserve sub-value meanings as list
+ assert (None, [meaning, meaning]) == result
+
+
+def test__get_meaning_w_array_value_root_meaning():
+ from google.cloud.datastore_v1.types import entity as entity_pb2
+ from google.cloud.datastore.helpers import _get_meaning
+
+ value_pb = entity_pb2.Value()
+ meaning = 9
+ value_pb.meaning = meaning
+ sub_value_pb1 = value_pb._pb.array_value.values.add()
+ sub_value_pb2 = value_pb._pb.array_value.values.add()
+
+ sub_value_pb1.string_value = "hi"
+ sub_value_pb2.string_value = "bye"
+
+ result = _get_meaning(value_pb, is_list=True)
+ # should preserve sub-value meanings as list
+ assert (meaning, None) == result
+
+
+def test__get_meaning_w_array_value_root_and_sub_meanings():
+ from google.cloud.datastore_v1.types import entity as entity_pb2
+ from google.cloud.datastore.helpers import _get_meaning
+
+ value_pb = entity_pb2.Value()
+ root_meaning = 9
+ sub_meaning = 3
+ value_pb.meaning = root_meaning
+ sub_value_pb1 = value_pb._pb.array_value.values.add()
+ sub_value_pb2 = value_pb._pb.array_value.values.add()
+
+ sub_value_pb1.meaning = sub_value_pb2.meaning = sub_meaning
+ sub_value_pb1.string_value = "hi"
+ sub_value_pb2.string_value = "bye"
+
+ result = _get_meaning(value_pb, is_list=True)
+ # should preserve sub-value meanings as list
+ assert (root_meaning, [sub_meaning, sub_meaning]) == result
def test__get_meaning_w_array_value_multiple_meanings():
@@ -1198,7 +1239,7 @@ def test__get_meaning_w_array_value_multiple_meanings():
sub_value_pb2.string_value = "bye"
result = _get_meaning(value_pb, is_list=True)
- assert result == [meaning1, meaning2]
+ assert result == (None, [meaning1, meaning2])
def test__get_meaning_w_array_value_meaning_partially_unset():
@@ -1215,7 +1256,102 @@ def test__get_meaning_w_array_value_meaning_partially_unset():
sub_value_pb2.string_value = "bye"
result = _get_meaning(value_pb, is_list=True)
- assert result == [meaning1, None]
+ assert result == (None, [meaning1, None])
+
+
+def test__get_meaning_w_array_value_meaning_fully_unset():
+ from google.cloud.datastore_v1.types import entity as entity_pb2
+ from google.cloud.datastore.helpers import _get_meaning
+
+ value_pb = entity_pb2.Value()
+ sub_value_pb1 = value_pb._pb.array_value.values.add()
+ sub_value_pb2 = value_pb._pb.array_value.values.add()
+
+ sub_value_pb1.string_value = "hi"
+ sub_value_pb2.string_value = "bye"
+
+ result = _get_meaning(value_pb, is_list=True)
+ assert result is None
+
+
+@pytest.mark.parametrize("orig_root_meaning", [0, 1])
+@pytest.mark.parametrize("orig_sub_meaning", [0, 1])
+def test__set_pb_meaning_w_array_value_fully_unset(orig_root_meaning, orig_sub_meaning):
+ """
+ call _set_pb_meaning_from_entity with meaning=None data.
+ Should not touch proto's meaning field
+ """
+ from google.cloud.datastore_v1.types import entity as entity_pb2
+ from google.cloud.datastore.helpers import _set_pb_meaning_from_entity
+ from google.cloud.datastore.entity import Entity
+
+ orig_pb = entity_pb2.Entity()
+ value_pb = orig_pb._pb.properties.get_or_create("value")
+ value_pb.meaning = orig_root_meaning
+ sub_value_pb1 = value_pb.array_value.values.add()
+ sub_value_pb1.meaning = orig_sub_meaning
+
+ entity = Entity(key="key")
+ entity._meanings = {"value": ((None, None), None)}
+ _set_pb_meaning_from_entity(entity, "value", None, value_pb, is_list=True)
+ assert value_pb.meaning == orig_root_meaning
+ assert value_pb.array_value.values[0].meaning == orig_sub_meaning
+
+
+@pytest.mark.parametrize("orig_meaning", [0, 1])
+def test__set_pb_meaning_w_value_unset(orig_meaning):
+ """
+ call _set_pb_meaning_from_entity with meaning=None data.
+ Should not touch proto's meaning field
+ """
+ from google.cloud.datastore_v1.types import entity as entity_pb2
+ from google.cloud.datastore.helpers import _set_pb_meaning_from_entity
+ from google.cloud.datastore.entity import Entity
+
+ orig_pb = entity_pb2.Entity()
+ value_pb = orig_pb._pb.properties.get_or_create("value")
+ value_pb.meaning = orig_meaning
+
+ entity = Entity(key="key")
+ entity._meanings = {"value": (None, None)}
+ _set_pb_meaning_from_entity(entity, "value", None, value_pb, is_list=False)
+ assert value_pb.meaning == orig_meaning
+
+
+def test__array_w_meaning_end_to_end():
+ """
+ Test proto->entity->proto with an array with a meaning field
+ """
+ from google.cloud.datastore_v1.types import entity as entity_pb2
+ from google.cloud.datastore.helpers import entity_from_protobuf
+ from google.cloud.datastore.helpers import entity_to_protobuf
+
+ orig_pb = entity_pb2.Entity()
+ value_pb = orig_pb._pb.properties.get_or_create("value")
+ value_pb.meaning = 31
+ sub_value_pb1 = value_pb.array_value.values.add()
+ sub_value_pb1.double_value = 1
+ sub_value_pb1.meaning = 1
+ sub_value_pb2 = value_pb.array_value.values.add()
+ sub_value_pb2.double_value = 2
+ sub_value_pb3 = value_pb.array_value.values.add()
+ sub_value_pb3.double_value = 3
+ sub_value_pb3.meaning = 3
+ # convert to entity
+ entity = entity_from_protobuf(orig_pb._pb)
+ assert entity._meanings["value"][0] == (31, [1, None, 3])
+ assert entity._meanings["value"][1] == [1, 2, 3]
+ # convert back to pb
+ output_entity_pb = entity_to_protobuf(entity)
+ final_pb = output_entity_pb._pb.properties["value"]
+ assert final_pb.meaning == 31
+ assert len(final_pb.array_value.values) == 3
+ assert final_pb.array_value.values[0].meaning == 1
+ assert final_pb.array_value.values[0].double_value == 1
+ assert final_pb.array_value.values[1].meaning == 0
+ assert final_pb.array_value.values[1].double_value == 2
+ assert final_pb.array_value.values[2].meaning == 3
+ assert final_pb.array_value.values[2].double_value == 3
def _make_geopoint(*args, **kwargs):