diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml
index cb06536d..76d0baa0 100644
--- a/.github/.OwlBot.lock.yaml
+++ b/.github/.OwlBot.lock.yaml
@@ -1,3 +1,3 @@
docker:
- image: gcr.io/repo-automation-bots/owlbot-python:latest
- digest: sha256:5ff7446edeaede81c3ed58b23a4e76a5403fba1350ce28478045657303b6479d
+ image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest
+ digest: sha256:4370ced27a324687ede5da07132dcdc5381993502a5e8a3e31e16dc631d026f0
diff --git a/.github/.OwlBot.yaml b/.github/.OwlBot.yaml
index 83c15486..41eaf2b9 100644
--- a/.github/.OwlBot.yaml
+++ b/.github/.OwlBot.yaml
@@ -13,7 +13,7 @@
# limitations under the License.
docker:
- image: gcr.io/repo-automation-bots/owlbot-python:latest
+ image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest
deep-remove-regex:
- /owl-bot-staging
diff --git a/.kokoro/build.sh b/.kokoro/build.sh
index 59bd450e..3ca0a226 100755
--- a/.kokoro/build.sh
+++ b/.kokoro/build.sh
@@ -41,7 +41,7 @@ python3 -m pip install --upgrade --quiet nox
python3 -m nox --version
# If this is a continuous build, send the test log to the FlakyBot.
-# See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot.
+# See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot.
if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]]; then
cleanup() {
chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot
diff --git a/.kokoro/presubmit/presubmit.cfg b/.kokoro/presubmit/presubmit.cfg
index 8f43917d..b158096f 100644
--- a/.kokoro/presubmit/presubmit.cfg
+++ b/.kokoro/presubmit/presubmit.cfg
@@ -1 +1,7 @@
-# Format: //devtools/kokoro/config/proto/build.proto
\ No newline at end of file
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Disable system tests.
+env_vars: {
+ key: "RUN_SYSTEM_TESTS"
+ value: "false"
+}
diff --git a/.kokoro/presubmit/system-3.8.cfg b/.kokoro/presubmit/system-3.8.cfg
new file mode 100644
index 00000000..f4bcee3d
--- /dev/null
+++ b/.kokoro/presubmit/system-3.8.cfg
@@ -0,0 +1,7 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Only run this nox session.
+env_vars: {
+ key: "NOX_SESSION"
+ value: "system-3.8"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/lint/common.cfg b/.kokoro/samples/lint/common.cfg
index 33f7432a..80001a39 100644
--- a/.kokoro/samples/lint/common.cfg
+++ b/.kokoro/samples/lint/common.cfg
@@ -31,4 +31,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
# Use the trampoline script to run in docker.
-build_file: "python-datastore/.kokoro/trampoline.sh"
\ No newline at end of file
+build_file: "python-datastore/.kokoro/trampoline_v2.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.6/common.cfg b/.kokoro/samples/python3.6/common.cfg
index a65c0f39..c726211b 100644
--- a/.kokoro/samples/python3.6/common.cfg
+++ b/.kokoro/samples/python3.6/common.cfg
@@ -37,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
# Use the trampoline script to run in docker.
-build_file: "python-datastore/.kokoro/trampoline.sh"
\ No newline at end of file
+build_file: "python-datastore/.kokoro/trampoline_v2.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.6/periodic-head.cfg b/.kokoro/samples/python3.6/periodic-head.cfg
index f9cfcd33..714045a7 100644
--- a/.kokoro/samples/python3.6/periodic-head.cfg
+++ b/.kokoro/samples/python3.6/periodic-head.cfg
@@ -7,5 +7,5 @@ env_vars: {
env_vars: {
key: "TRAMPOLINE_BUILD_FILE"
- value: "github/python-pubsub/.kokoro/test-samples-against-head.sh"
+ value: "github/python-datastore/.kokoro/test-samples-against-head.sh"
}
diff --git a/.kokoro/samples/python3.6/periodic.cfg b/.kokoro/samples/python3.6/periodic.cfg
index 50fec964..71cd1e59 100644
--- a/.kokoro/samples/python3.6/periodic.cfg
+++ b/.kokoro/samples/python3.6/periodic.cfg
@@ -3,4 +3,4 @@
env_vars: {
key: "INSTALL_LIBRARY_FROM_SOURCE"
value: "False"
-}
\ No newline at end of file
+}
diff --git a/.kokoro/samples/python3.7/common.cfg b/.kokoro/samples/python3.7/common.cfg
index 18251bfc..a46730a6 100644
--- a/.kokoro/samples/python3.7/common.cfg
+++ b/.kokoro/samples/python3.7/common.cfg
@@ -37,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
# Use the trampoline script to run in docker.
-build_file: "python-datastore/.kokoro/trampoline.sh"
\ No newline at end of file
+build_file: "python-datastore/.kokoro/trampoline_v2.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.7/periodic-head.cfg b/.kokoro/samples/python3.7/periodic-head.cfg
index f9cfcd33..714045a7 100644
--- a/.kokoro/samples/python3.7/periodic-head.cfg
+++ b/.kokoro/samples/python3.7/periodic-head.cfg
@@ -7,5 +7,5 @@ env_vars: {
env_vars: {
key: "TRAMPOLINE_BUILD_FILE"
- value: "github/python-pubsub/.kokoro/test-samples-against-head.sh"
+ value: "github/python-datastore/.kokoro/test-samples-against-head.sh"
}
diff --git a/.kokoro/samples/python3.7/periodic.cfg b/.kokoro/samples/python3.7/periodic.cfg
index 50fec964..71cd1e59 100644
--- a/.kokoro/samples/python3.7/periodic.cfg
+++ b/.kokoro/samples/python3.7/periodic.cfg
@@ -3,4 +3,4 @@
env_vars: {
key: "INSTALL_LIBRARY_FROM_SOURCE"
value: "False"
-}
\ No newline at end of file
+}
diff --git a/.kokoro/samples/python3.8/common.cfg b/.kokoro/samples/python3.8/common.cfg
index 77f73452..d13fb561 100644
--- a/.kokoro/samples/python3.8/common.cfg
+++ b/.kokoro/samples/python3.8/common.cfg
@@ -37,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
# Use the trampoline script to run in docker.
-build_file: "python-datastore/.kokoro/trampoline.sh"
\ No newline at end of file
+build_file: "python-datastore/.kokoro/trampoline_v2.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.8/periodic-head.cfg b/.kokoro/samples/python3.8/periodic-head.cfg
index f9cfcd33..714045a7 100644
--- a/.kokoro/samples/python3.8/periodic-head.cfg
+++ b/.kokoro/samples/python3.8/periodic-head.cfg
@@ -7,5 +7,5 @@ env_vars: {
env_vars: {
key: "TRAMPOLINE_BUILD_FILE"
- value: "github/python-pubsub/.kokoro/test-samples-against-head.sh"
+ value: "github/python-datastore/.kokoro/test-samples-against-head.sh"
}
diff --git a/.kokoro/samples/python3.8/periodic.cfg b/.kokoro/samples/python3.8/periodic.cfg
index 50fec964..71cd1e59 100644
--- a/.kokoro/samples/python3.8/periodic.cfg
+++ b/.kokoro/samples/python3.8/periodic.cfg
@@ -3,4 +3,4 @@
env_vars: {
key: "INSTALL_LIBRARY_FROM_SOURCE"
value: "False"
-}
\ No newline at end of file
+}
diff --git a/.kokoro/samples/python3.9/common.cfg b/.kokoro/samples/python3.9/common.cfg
index 6e6deb84..4fcda9f0 100644
--- a/.kokoro/samples/python3.9/common.cfg
+++ b/.kokoro/samples/python3.9/common.cfg
@@ -37,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
# Use the trampoline script to run in docker.
-build_file: "python-datastore/.kokoro/trampoline.sh"
\ No newline at end of file
+build_file: "python-datastore/.kokoro/trampoline_v2.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.9/periodic-head.cfg b/.kokoro/samples/python3.9/periodic-head.cfg
index f9cfcd33..714045a7 100644
--- a/.kokoro/samples/python3.9/periodic-head.cfg
+++ b/.kokoro/samples/python3.9/periodic-head.cfg
@@ -7,5 +7,5 @@ env_vars: {
env_vars: {
key: "TRAMPOLINE_BUILD_FILE"
- value: "github/python-pubsub/.kokoro/test-samples-against-head.sh"
+ value: "github/python-datastore/.kokoro/test-samples-against-head.sh"
}
diff --git a/.kokoro/samples/python3.9/periodic.cfg b/.kokoro/samples/python3.9/periodic.cfg
index 50fec964..71cd1e59 100644
--- a/.kokoro/samples/python3.9/periodic.cfg
+++ b/.kokoro/samples/python3.9/periodic.cfg
@@ -3,4 +3,4 @@
env_vars: {
key: "INSTALL_LIBRARY_FROM_SOURCE"
value: "False"
-}
\ No newline at end of file
+}
diff --git a/.kokoro/test-samples-against-head.sh b/.kokoro/test-samples-against-head.sh
index 7503e762..ba3a707b 100755
--- a/.kokoro/test-samples-against-head.sh
+++ b/.kokoro/test-samples-against-head.sh
@@ -23,6 +23,4 @@ set -eo pipefail
# Enables `**` to include files nested inside sub-folders
shopt -s globstar
-cd github/python-datastore
-
exec .kokoro/test-samples-impl.sh
diff --git a/.kokoro/test-samples-impl.sh b/.kokoro/test-samples-impl.sh
index 311a8d54..8a324c9c 100755
--- a/.kokoro/test-samples-impl.sh
+++ b/.kokoro/test-samples-impl.sh
@@ -80,7 +80,7 @@ for file in samples/**/requirements.txt; do
EXIT=$?
# If this is a periodic build, send the test log to the FlakyBot.
- # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot.
+ # See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot.
if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot
$KOKORO_GFILE_DIR/linux_amd64/flakybot
diff --git a/.kokoro/test-samples.sh b/.kokoro/test-samples.sh
index b3014bb9..11c042d3 100755
--- a/.kokoro/test-samples.sh
+++ b/.kokoro/test-samples.sh
@@ -24,8 +24,6 @@ set -eo pipefail
# Enables `**` to include files nested inside sub-folders
shopt -s globstar
-cd github/python-datastore
-
# Run periodic samples tests at latest release
if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
# preserving the test runner implementation.
diff --git a/.trampolinerc b/.trampolinerc
index 383b6ec8..0eee72ab 100644
--- a/.trampolinerc
+++ b/.trampolinerc
@@ -16,15 +16,26 @@
# Add required env vars here.
required_envvars+=(
- "STAGING_BUCKET"
- "V2_STAGING_BUCKET"
)
# Add env vars which are passed down into the container here.
pass_down_envvars+=(
+ "NOX_SESSION"
+ ###############
+ # Docs builds
+ ###############
"STAGING_BUCKET"
"V2_STAGING_BUCKET"
- "NOX_SESSION"
+ ##################
+ # Samples builds
+ ##################
+ "INSTALL_LIBRARY_FROM_SOURCE"
+ "RUN_TESTS_SESSION"
+ "BUILD_SPECIFIC_GCLOUD_PROJECT"
+ # Target directories.
+ "RUN_TESTS_DIRS"
+ # The nox session to run.
+ "RUN_TESTS_SESSION"
)
# Prevent unintentional override on the default image.
diff --git a/CHANGELOG.md b/CHANGELOG.md
index f7d6a1a6..2b81e228 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,21 @@
[1]: https://pypi.org/project/google-cloud-datastore/#history
+## [2.2.0](https://www.github.com/googleapis/python-datastore/compare/v2.1.6...v2.2.0) (2021-10-08)
+
+
+### Features
+
+* add support for Python 3.10 ([#233](https://www.github.com/googleapis/python-datastore/issues/233)) ([f524c40](https://www.github.com/googleapis/python-datastore/commit/f524c40e8251c2b716ea87cd512404f0d6f1b019))
+
+### [2.1.6](https://www.github.com/googleapis/python-datastore/compare/v2.1.5...v2.1.6) (2021-07-26)
+
+
+### Documentation
+
+* add Samples section to CONTRIBUTING.rst ([#195](https://www.github.com/googleapis/python-datastore/issues/195)) ([f607fb5](https://www.github.com/googleapis/python-datastore/commit/f607fb544a2f7279267e5a5a534fc31e573b6b74))
+
+
### [2.1.5](https://www.github.com/googleapis/python-datastore/compare/v2.1.4...v2.1.5) (2021-07-20)
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index 63d4d9de..37893012 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -22,7 +22,7 @@ In order to add a feature:
documentation.
- The feature must work fully on the following CPython versions:
- 3.6, 3.7, 3.8 and 3.9 on both UNIX and Windows.
+ 3.6, 3.7, 3.8, 3.9 and 3.10 on both UNIX and Windows.
- The feature must not add unnecessary dependencies (where
"unnecessary" is of course subjective, but new dependencies should
@@ -50,9 +50,9 @@ You'll have to create a development environment using a Git checkout:
# Configure remotes such that you can pull changes from the googleapis/python-datastore
# repository into your local repository.
$ git remote add upstream git@github.com:googleapis/python-datastore.git
- # fetch and merge changes from upstream into master
+ # fetch and merge changes from upstream into main
$ git fetch upstream
- $ git merge upstream/master
+ $ git merge upstream/main
Now your local repo is set up such that you will push changes to your GitHub
repo, from which you can submit a pull request.
@@ -72,7 +72,7 @@ We use `nox <https://nox.readthedocs.io/en/latest/>`__ to instrument our tests.
- To run a single unit test::
- $ nox -s unit-3.9 -- -k <name of test>
+ $ nox -s unit-3.10 -- -k <name of test>
.. note::
@@ -110,12 +110,12 @@ Coding Style
variables::
export GOOGLE_CLOUD_TESTING_REMOTE="upstream"
- export GOOGLE_CLOUD_TESTING_BRANCH="master"
+ export GOOGLE_CLOUD_TESTING_BRANCH="main"
By doing this, you are specifying the location of the most up-to-date
- version of ``python-datastore``. The the suggested remote name ``upstream``
- should point to the official ``googleapis`` checkout and the
- the branch should be the main branch on that remote (``master``).
+ version of ``python-datastore``. The
+ remote name ``upstream`` should point to the official ``googleapis``
+ checkout and the branch should be the default branch on that remote (``main``).
- This repository contains configuration for the
`pre-commit <https://pre-commit.com/>`__ tool, which automates checking
@@ -202,6 +202,30 @@ Build the docs via:
$ nox -s docs
+*************************
+Samples and code snippets
+*************************
+
+Code samples and snippets live in the `samples/` directory. Feel free to
+provide more examples, but make sure to write tests for those examples.
+Each folder containing example code requires its own `noxfile.py` script
+which automates testing. If you decide to create a new folder, you can
+base it on the `samples/snippets` folder (providing `noxfile.py` and
+the requirements files).
+
+The tests will run against a real Google Cloud Project, so you should
+configure them just like the System Tests.
+
+- To run sample tests, you can execute::
+
+ # Run all tests in a folder
+ $ cd samples/snippets
+ $ nox -s py-3.8
+
+ # Run a single sample test
+ $ cd samples/snippets
+ $ nox -s py-3.8 -- -k <name of test>
+
********************************************
Note About ``README`` as it pertains to PyPI
********************************************
@@ -210,7 +234,7 @@ The `description on PyPI`_ for the project comes directly from the
``README``. Due to the reStructuredText (``rst``) parser used by
PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst``
instead of
-``https://github.com/googleapis/python-datastore/blob/master/CONTRIBUTING.rst``)
+``https://github.com/googleapis/python-datastore/blob/main/CONTRIBUTING.rst``)
may cause problems creating links or rendering the description.
.. _description on PyPI: https://pypi.org/project/google-cloud-datastore
@@ -226,16 +250,18 @@ We support:
- `Python 3.7`_
- `Python 3.8`_
- `Python 3.9`_
+- `Python 3.10`_
.. _Python 3.6: https://docs.python.org/3.6/
.. _Python 3.7: https://docs.python.org/3.7/
.. _Python 3.8: https://docs.python.org/3.8/
.. _Python 3.9: https://docs.python.org/3.9/
+.. _Python 3.10: https://docs.python.org/3.10/
Supported versions can be found in our ``noxfile.py`` `config`_.
-.. _config: https://github.com/googleapis/python-datastore/blob/master/noxfile.py
+.. _config: https://github.com/googleapis/python-datastore/blob/main/noxfile.py
We also explicitly decided to support Python 3 beginning with version 3.6.
diff --git a/README.rst b/README.rst
index bef8a2fc..b142bc22 100644
--- a/README.rst
+++ b/README.rst
@@ -13,7 +13,7 @@ all other queries.
- `Product Documentation`_
.. |GA| image:: https://img.shields.io/badge/support-GA-gold.svg
- :target: https://github.com/googleapis/google-cloud-python/blob/master/README.rst#general-availability
+ :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#general-availability
.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-datastore.svg
:target: https://pypi.org/project/google-cloud-datastore/
.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-datastore.svg
diff --git a/docs/conf.py b/docs/conf.py
index f1b50787..d51558be 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -76,8 +76,8 @@
# The encoding of source files.
# source_encoding = 'utf-8-sig'
-# The master toctree document.
-master_doc = "index"
+# The root toctree document.
+root_doc = "index"
# General information about the project.
project = "google-cloud-datastore"
@@ -110,6 +110,7 @@
# directories to ignore when looking for source files.
exclude_patterns = [
"_build",
+ "**/.nox/**/*",
"samples/AUTHORING_GUIDE.md",
"samples/CONTRIBUTING.md",
"samples/snippets/README.rst",
@@ -279,7 +280,7 @@
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(
- master_doc,
+ root_doc,
"google-cloud-datastore.tex",
"google-cloud-datastore Documentation",
author,
@@ -314,7 +315,7 @@
# (source start file, name, description, authors, manual section).
man_pages = [
(
- master_doc,
+ root_doc,
"google-cloud-datastore",
"google-cloud-datastore Documentation",
[author],
@@ -333,7 +334,7 @@
# dir menu entry, description, category)
texinfo_documents = [
(
- master_doc,
+ root_doc,
"google-cloud-datastore",
"google-cloud-datastore Documentation",
author,
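A note on the rename above: Sphinx 4.0 introduced ``root_doc`` as the new name
for this setting, keeping ``master_doc`` as an alias. A hedged compatibility
sketch for a conf.py that must also run under pre-4.0 Sphinx, where only the
old name exists (this repo's templates evidently assume a new-enough Sphinx):

    # conf.py -- illustrative sketch, not this repo's actual configuration
    root_doc = "index"
    master_doc = root_doc  # pre-4.0 Sphinx only reads the old name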
diff --git a/google/cloud/datastore/batch.py b/google/cloud/datastore/batch.py
index 7b0b4758..ba8fe6b7 100644
--- a/google/cloud/datastore/batch.py
+++ b/google/cloud/datastore/batch.py
@@ -32,31 +32,58 @@ class Batch(object):
For example, the following snippet of code will put the two ``save``
operations and the ``delete`` operation into the same mutation, and send
- them to the server in a single API request::
+ them to the server in a single API request:
- >>> from google.cloud import datastore
- >>> client = datastore.Client()
- >>> batch = client.batch()
- >>> batch.begin()
- >>> batch.put(entity1)
- >>> batch.put(entity2)
- >>> batch.delete(key3)
- >>> batch.commit()
+ .. testsetup:: batch
+
+ import uuid
+
+ from google.cloud import datastore
+
+ unique = str(uuid.uuid4())[0:8]
+ client = datastore.Client(namespace='ns{}'.format(unique))
+
+ .. doctest:: batch
+
+ >>> entity1 = datastore.Entity(client.key('EntityKind', 1234))
+ >>> entity2 = datastore.Entity(client.key('EntityKind', 2345))
+ >>> key3 = client.key('EntityKind', 3456)
+ >>> batch = client.batch()
+ >>> batch.begin()
+ >>> batch.put(entity1)
+ >>> batch.put(entity2)
+ >>> batch.delete(key3)
+ >>> batch.commit()
You can also use a batch as a context manager, in which case
:meth:`commit` will be called automatically if its block exits without
- raising an exception::
+ raising an exception:
+
+ .. doctest:: batch
+
+ >>> with client.batch() as batch:
+ ... batch.put(entity1)
+ ... batch.put(entity2)
+ ... batch.delete(key3)
+
+ By default, no updates will be sent if the block exits with an error:
+
+ .. doctest:: batch
- >>> with batch:
- ... batch.put(entity1)
- ... batch.put(entity2)
- ... batch.delete(key3)
+ >>> def do_some_work(batch):
+ ... return
+ >>> with client.batch() as batch:
+ ... do_some_work(batch)
+ ... raise Exception() # rolls back
+ Traceback (most recent call last):
+ ...
+ Exception
- By default, no updates will be sent if the block exits with an error::
+ .. testcleanup:: batch
- >>> with batch:
- ... do_some_work(batch)
- ... raise Exception() # rolls back
+ with client.batch() as batch:
+ batch.delete(client.key('EntityKind', 1234))
+ batch.delete(client.key('EntityKind', 2345))
:type client: :class:`google.cloud.datastore.client.Client`
:param client: The client used to connect to datastore.
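The rewritten batch docstring relies on the context-manager contract: commit
when the block exits cleanly, skip the commit and roll back when it raises. A
minimal sketch of that contract, illustrative only (the real logic lives in
``Batch.__exit__`` in this module):

    class BatchSketch:
        """Toy stand-in showing the commit/rollback-on-exit contract."""

        def begin(self): print("begin")
        def commit(self): print("commit")
        def rollback(self): print("rollback")

        def __enter__(self):
            self.begin()
            return self

        def __exit__(self, exc_type, exc_value, traceback):
            if exc_type is None:
                self.commit()    # clean exit: send the mutations
            else:
                self.rollback()  # error: discard them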
diff --git a/google/cloud/datastore/client.py b/google/cloud/datastore/client.py
index 28d968ce..b5de0fab 100644
--- a/google/cloud/datastore/client.py
+++ b/google/cloud/datastore/client.py
@@ -757,58 +757,37 @@ def query(self, **kwargs):
.. testsetup:: query
- import os
import uuid
from google.cloud import datastore
- unique = os.getenv('CIRCLE_BUILD_NUM', str(uuid.uuid4())[0:8])
+ unique = str(uuid.uuid4())[0:8]
client = datastore.Client(namespace='ns{}'.format(unique))
- query = client.query(kind='_Doctest')
- def do_something(entity):
+ def do_something_with(entity):
pass
.. doctest:: query
>>> query = client.query(kind='MyKind')
>>> query.add_filter('property', '=', 'val')
+
Using the query iterator
.. doctest:: query
+ >>> filters = [('property', '=', 'val')]
+ >>> query = client.query(kind='MyKind', filters=filters)
>>> query_iter = query.fetch()
>>> for entity in query_iter:
- ... do_something(entity)
+ ... do_something_with(entity)
or manually page through results
- .. testsetup:: query-page
-
- import os
- import uuid
-
- from google.cloud import datastore
- from tests.system.test_system import Config # system tests
-
- unique = os.getenv('CIRCLE_BUILD_NUM', str(uuid.uuid4())[0:8])
- client = datastore.Client(namespace='ns{}'.format(unique))
-
- key = client.key('_Doctest')
- entity1 = datastore.Entity(key=key)
- entity1['foo'] = 1337
- entity2 = datastore.Entity(key=key)
- entity2['foo'] = 42
- Config.TO_DELETE.extend([entity1, entity2])
- client.put_multi([entity1, entity2])
-
- query = client.query(kind='_Doctest')
- cursor = None
-
- .. doctest:: query-page
+ .. doctest:: query
- >>> query_iter = query.fetch(start_cursor=cursor)
+ >>> query_iter = query.fetch()
>>> pages = query_iter.pages
>>>
>>> first_page = next(pages)
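The paging doctest above leans on two documented iterator attributes,
``pages`` and ``next_page_token``. A hedged sketch of walking every page
while keeping the cursor for each (``query`` is assumed to be a
``google.cloud.datastore`` Query object):

    def walk_pages(query):
        # Yield (cursor, entities) for each page of results.
        iterator = query.fetch()
        for page in iterator.pages:
            entities = list(page)  # consume the page before reading the cursor
            yield iterator.next_page_token, entities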
diff --git a/google/cloud/datastore/entity.py b/google/cloud/datastore/entity.py
index c317bdda..b1148865 100644
--- a/google/cloud/datastore/entity.py
+++ b/google/cloud/datastore/entity.py
@@ -42,23 +42,21 @@ class Entity(dict):
.. testsetup:: entity-ctor
- import os
import uuid
from google.cloud import datastore
- from tests.system.test_system import Config # system tests
+ from google.cloud import datastore
- unique = os.getenv('CIRCLE_BUILD_NUM', str(uuid.uuid4())[0:8])
+ unique = str(uuid.uuid4())[0:8]
client = datastore.Client(namespace='ns{}'.format(unique))
- key = client.key('EntityKind', 1234, namespace='_Doctest')
- entity = datastore.Entity(key=key)
- entity['property'] = 'value'
- Config.TO_DELETE.append(entity)
+ entity = datastore.Entity(client.key('EntityKind', 1234))
+ entity['property'] = 'value'
client.put(entity)
.. doctest:: entity-ctor
+ >>> key = client.key('EntityKind', 1234)
>>> client.get(key)
<Entity('EntityKind', 1234) {'property': 'value'}>
@@ -70,6 +68,10 @@ class Entity(dict):
>>> entity['age'] = 20
>>> entity['name'] = 'JJ'
+ .. testcleanup:: entity-ctor
+
+ client.delete(entity.key)
+
However, not all types are allowed as a value for a Google Cloud Datastore
entity. The following basic types are supported by the API:
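``Entity`` subclasses ``dict`` (the class statement above), so the doctest's
property assignments are ordinary mapping operations. A short illustrative
snippet, assuming a ``client`` as created in the testsetup:

    # assumes: from google.cloud import datastore; client = datastore.Client()
    entity = datastore.Entity(client.key('EntityKind', 1234))
    entity['age'] = 20             # plain dict item assignment
    entity.update({'name': 'JJ'})  # inherited dict methods work too
    assert dict(entity) == {'age': 20, 'name': 'JJ'}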
diff --git a/google/cloud/datastore/helpers.py b/google/cloud/datastore/helpers.py
index 7222fbdf..85dfc240 100644
--- a/google/cloud/datastore/helpers.py
+++ b/google/cloud/datastore/helpers.py
@@ -313,10 +313,16 @@ def _pb_attr_value(val):
For example:
- >>> _pb_attr_value(1234)
- ('integer_value', 1234)
- >>> _pb_attr_value('my_string')
- ('string_value', 'my_string')
+ .. testsetup:: pb-attr-value
+
+ from google.cloud.datastore.helpers import _pb_attr_value
+
+ .. doctest:: pb-attr-value
+
+ >>> _pb_attr_value(1234)
+ ('integer_value', 1234)
+ >>> _pb_attr_value('my_string')
+ ('string_value', 'my_string')
:type val:
:class:`datetime.datetime`, :class:`google.cloud.datastore.key.Key`,
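The new doctest shows ``_pb_attr_value`` mapping a Python value to a
``(pb_attr_name, converted_value)`` pair. A hedged sketch of how such a pair
is typically consumed; ``SimpleNamespace`` stands in for the real protobuf
``Value`` message (an assumption for illustration):

    from types import SimpleNamespace

    value_pb = SimpleNamespace()            # stand-in for a Value pb
    name, value = ('integer_value', 1234)   # what _pb_attr_value(1234) returns
    setattr(value_pb, name, value)          # set the matching pb attribute
    assert value_pb.integer_value == 1234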
diff --git a/google/cloud/datastore/key.py b/google/cloud/datastore/key.py
index 98502f9c..76f18455 100644
--- a/google/cloud/datastore/key.py
+++ b/google/cloud/datastore/key.py
@@ -598,6 +598,8 @@ def _get_flat_path(path_pb):
For example
+ .. code:: python
+
Element {
type: "parent"
id: 59
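For context on the snippet now wrapped in a code block: ``_get_flat_path``
flattens a repeated ``Element`` field into the ``('parent', 59, ...)`` tuple
form that ``Key.flat_path`` exposes. A hedged sketch of that flattening over
plain dicts (the real function reads protobuf elements; the second element
below is invented for illustration):

    def flatten_path(elements):
        # Alternate kind and id-or-name, matching Key.flat_path ordering.
        flat = []
        for element in elements:
            flat.append(element["type"])
            flat.append(element.get("id") or element.get("name"))
        return tuple(flat)

    assert flatten_path(
        [{"type": "parent", "id": 59}, {"type": "child", "name": "one"}]
    ) == ("parent", 59, "child", "one")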
diff --git a/google/cloud/datastore/query.py b/google/cloud/datastore/query.py
index b4b24ca7..5e4f4937 100644
--- a/google/cloud/datastore/query.py
+++ b/google/cloud/datastore/query.py
@@ -215,13 +215,21 @@ def add_filter(self, property_name, operator, value):
where property is a property stored on the entity in the datastore
and operator is one of ``OPERATORS``
- (ie, ``=``, ``<``, ``<=``, ``>``, ``>=``)::
+ (ie, ``=``, ``<``, ``<=``, ``>``, ``>=``):
- >>> from google.cloud import datastore
- >>> client = datastore.Client()
- >>> query = client.query(kind='Person')
- >>> query = query.add_filter('name', '=', 'James')
- >>> query = query.add_filter('age', '>', 50)
+ .. testsetup:: query-filter
+
+ import uuid
+
+ from google.cloud import datastore
+
+ client = datastore.Client()
+
+ .. doctest:: query-filter
+
+ >>> query = client.query(kind='Person')
+ >>> query = query.add_filter('name', '=', 'James')
+ >>> query = query.add_filter('age', '>', 50)
:type property_name: str
:param property_name: A property name.
@@ -349,16 +357,37 @@ def fetch(
):
"""Execute the Query; return an iterator for the matching entities.
- For example::
+ For example:
+
+ .. testsetup:: query-fetch
+
+ import uuid
+
+ from google.cloud import datastore
+
+ unique = str(uuid.uuid4())[0:8]
+ client = datastore.Client(namespace='ns{}'.format(unique))
+
+
+ .. doctest:: query-fetch
+
+ >>> andy = datastore.Entity(client.key('Person', 1234))
+ >>> andy['name'] = 'Andy'
+ >>> sally = datastore.Entity(client.key('Person', 2345))
+ >>> sally['name'] = 'Sally'
+ >>> bobby = datastore.Entity(client.key('Person', 3456))
+ >>> bobby['name'] = 'Bobby'
+ >>> client.put_multi([andy, sally, bobby])
+ >>> query = client.query(kind='Person')
+ >>> result = list(query.add_filter('name', '=', 'Sally').fetch())
+ >>> result
+ [<Entity('Person', 2345) {'name': 'Sally'}>]
+
+ .. testcleanup:: query-fetch
- >>> from google.cloud import datastore
- >>> client = datastore.Client()
- >>> query = client.query(kind='Person')
- >>> result = query.add_filter('name', '=', 'Sally').fetch()
- >>> list(result)
- [<Entity object>, <Entity object>, ...]
- >>> list(query.fetch(1))
- [<Entity object>]
+ client.delete(andy.key)
+ client.delete(sally.key)
+ client.delete(bobby.key)
:type limit: int
:param limit: (Optional) limit passed through to the iterator.
diff --git a/google/cloud/datastore/transaction.py b/google/cloud/datastore/transaction.py
index 21cac1a7..5da64198 100644
--- a/google/cloud/datastore/transaction.py
+++ b/google/cloud/datastore/transaction.py
@@ -41,35 +41,26 @@ class Transaction(Batch):
operations (either ``insert`` or ``upsert``) into the same
mutation, and execute those within a transaction:
- .. testsetup:: txn-put-multi, txn-api
+ .. testsetup:: txn
- import os
import uuid
from google.cloud import datastore
- from tests.system.test_system import Config # system tests
- unique = os.getenv('CIRCLE_BUILD_NUM', str(uuid.uuid4())[0:8])
+ unique = str(uuid.uuid4())[0:8]
client = datastore.Client(namespace='ns{}'.format(unique))
- key1 = client.key('_Doctest')
- entity1 = datastore.Entity(key=key1)
- entity1['foo'] = 1337
- key2 = client.key('_Doctest', 'abcd1234')
- entity2 = datastore.Entity(key=key2)
- entity2['foo'] = 42
-
- Config.TO_DELETE.extend([entity1, entity2])
-
- .. doctest:: txn-put-multi
+ .. doctest:: txn
+ >>> entity1 = datastore.Entity(client.key('EntityKind', 1234))
+ >>> entity2 = datastore.Entity(client.key('EntityKind', 2345))
>>> with client.transaction():
... client.put_multi([entity1, entity2])
Because it derives from :class:`~google.cloud.datastore.batch.Batch`,
:class:`Transaction` also provides :meth:`put` and :meth:`delete` methods:
- .. doctest:: txn-api
+ .. doctest:: txn
>>> with client.transaction() as xact:
... xact.put(entity1)
@@ -78,24 +69,12 @@ class Transaction(Batch):
By default, the transaction is rolled back if the transaction block
exits with an error:
- .. testsetup:: txn-error
-
- import os
- import uuid
-
- from google.cloud import datastore
-
- unique = os.getenv('CIRCLE_BUILD_NUM', str(uuid.uuid4())[0:8])
- client = datastore.Client(namespace='ns{}'.format(unique))
-
- def do_some_work():
- return
-
- class SomeException(Exception):
- pass
-
- .. doctest:: txn-error
+ .. doctest:: txn
+ >>> def do_some_work():
+ ... return
+ >>> class SomeException(Exception):
+ ... pass
>>> with client.transaction():
... do_some_work()
... raise SomeException # rolls back
@@ -112,58 +91,51 @@ class SomeException(Exception):
entities will not be available at save time! That means, if you
try:
- .. testsetup:: txn-entity-key, txn-entity-key-after, txn-manual
-
- import os
- import uuid
-
- from google.cloud import datastore
- from tests.system.test_system import Config # system tests
-
- unique = os.getenv('CIRCLE_BUILD_NUM', str(uuid.uuid4())[0:8])
- client = datastore.Client(namespace='ns{}'.format(unique))
-
- def Entity(*args, **kwargs):
- entity = datastore.Entity(*args, **kwargs)
- Config.TO_DELETE.append(entity)
- return entity
-
- .. doctest:: txn-entity-key
+ .. doctest:: txn
>>> with client.transaction():
- ... entity = Entity(key=client.key('Thing'))
- ... client.put(entity)
+ ... thing1 = datastore.Entity(key=client.key('Thing'))
+ ... client.put(thing1)
- ``entity`` won't have a complete key until the transaction is
+ ``thing1`` won't have a complete key until the transaction is
committed.
Once you exit the transaction (or call :meth:`commit`), the
automatically generated ID will be assigned to the entity:
- .. doctest:: txn-entity-key-after
+ .. doctest:: txn
>>> with client.transaction():
- ... entity = Entity(key=client.key('Thing'))
- ... client.put(entity)
- ... print(entity.key.is_partial) # There is no ID on this key.
+ ... thing2 = datastore.Entity(key=client.key('Thing'))
+ ... client.put(thing2)
+ ... print(thing2.key.is_partial) # There is no ID on this key.
...
True
- >>> print(entity.key.is_partial) # There *is* an ID.
+ >>> print(thing2.key.is_partial) # There *is* an ID.
False
If you don't want to use the context manager you can initialize a
transaction manually:
- .. doctest:: txn-manual
+ .. doctest:: txn
>>> transaction = client.transaction()
>>> transaction.begin()
>>>
- >>> entity = Entity(key=client.key('Thing'))
- >>> transaction.put(entity)
+ >>> thing3 = datastore.Entity(key=client.key('Thing'))
+ >>> transaction.put(thing3)
>>>
>>> transaction.commit()
+ .. testcleanup:: txn
+
+ with client.batch() as batch:
+ batch.delete(client.key('EntityKind', 1234))
+ batch.delete(client.key('EntityKind', 2345))
+ batch.delete(thing1.key)
+ batch.delete(thing2.key)
+ batch.delete(thing3.key)
+
:type client: :class:`google.cloud.datastore.client.Client`
:param client: the client used to connect to datastore.
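The consolidated ``txn`` doctests hinge on partial keys: a key created
without an ID stays partial until commit assigns one, which is why
``thing2.key.is_partial`` flips from ``True`` to ``False`` above. A short
illustrative snippet (``client`` as in the testsetup; ``completed_key`` is
the documented way to fill in an ID by hand):

    key = client.key('Thing')            # no ID or name: a partial key
    assert key.is_partial
    completed = key.completed_key(1234)  # returns a new, complete key
    assert not completed.is_partial
    assert completed.id == 1234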
diff --git a/google/cloud/datastore/version.py b/google/cloud/datastore/version.py
index 54b693bd..bd0f8e5c 100644
--- a/google/cloud/datastore/version.py
+++ b/google/cloud/datastore/version.py
@@ -12,4 +12,4 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-__version__ = "2.1.5"
+__version__ = "2.2.0"
diff --git a/noxfile.py b/noxfile.py
index 4dd9e06c..1ca31940 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -29,7 +29,7 @@
DEFAULT_PYTHON_VERSION = "3.8"
SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"]
-UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"]
+UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"]
CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()
@@ -42,6 +42,7 @@
"lint_setup_py",
"blacken",
"docs",
+ "doctests",
]
# Error if a python version is missing
@@ -84,9 +85,15 @@ def default(session):
constraints_path = str(
CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt"
)
- session.install("asyncmock", "pytest-asyncio", "-c", constraints_path)
-
- session.install("mock", "pytest", "pytest-cov", "-c", constraints_path)
+ session.install(
+ "mock",
+ "asyncmock",
+ "pytest",
+ "pytest-cov",
+ "pytest-asyncio",
+ "-c",
+ constraints_path,
+ )
session.install("-e", ".", "-c", constraints_path)
@@ -157,7 +164,6 @@ def system(session, disable_grpc):
env=env,
*session.posargs,
)
-
if system_test_folder_exists:
session.run(
"py.test",
@@ -204,6 +210,22 @@ def docs(session):
)
+@nox.session(python="3.6")
+def doctests(session):
+ # Doctests run against Python 3.6 only.
+ # It is difficult to make doctests run against both Python 2 and Python 3
+ # because they test string output equivalence, which is difficult to
+ # make match (e.g. unicode literals starting with "u").
+
+ # Install all test dependencies, then install this package into the
+ # virtualenv's dist-packages.
+ session.install("mock", "pytest", "sphinx", "google-cloud-testutils")
+ session.install("-e", ".")
+
+ # Run py.test against the doctests.
+ session.run("py.test", "tests/doctests.py")
+
+
@nox.session(python=DEFAULT_PYTHON_VERSION)
def docfx(session):
"""Build the docfx yaml files for this library."""
diff --git a/owlbot.py b/owlbot.py
index 8017fb4a..abbbb99a 100644
--- a/owlbot.py
+++ b/owlbot.py
@@ -15,6 +15,7 @@
"""This script is used to synthesize generated parts of this library."""
import synthtool as s
from synthtool import gcp
+from synthtool.languages import python
common = gcp.CommonTemplates()
@@ -24,28 +25,28 @@
datastore_admin_default_version = "v1"
for library in s.get_staging_dirs(datastore_default_version):
- if library.parent.absolute() == 'datastore':
+ if library.parent.absolute() == "datastore":
s.move(library / f"google/cloud/datastore_{library.name}")
- s.move(library / f"tests/")
+ s.move(library / "tests/")
s.move(library / "scripts")
for library in s.get_staging_dirs(datastore_admin_default_version):
- if library.parent.absolute() == 'datastore_admin':
+ if library.parent.absolute() == "datastore_admin":
s.replace(
library / "google/**/datastore_admin_client.py",
"google-cloud-datastore-admin",
- "google-cloud-datstore"
+ "google-cloud-datstore",
)
# Remove spurious markup
s.replace(
"google/**/datastore_admin/client.py",
- "\s+---------------------------------(-)+",
- ""
+ r"\s+---------------------------------(-)+",
+ "",
)
s.move(library / f"google/cloud/datastore_admin_{library.name}")
- s.move(library / f"tests")
+ s.move(library / "tests")
s.move(library / "scripts")
s.remove_staging_dirs()
@@ -55,14 +56,20 @@
# ----------------------------------------------------------------------------
templated_files = common.py_library(
microgenerator=True,
+ split_system_tests=True,
+ unit_test_python_versions=["3.6", "3.7", "3.8", "3.9", "3.10"],
+)
+s.move(
+ templated_files,
+ excludes=["docs/multiprocessing.rst", ".coveragerc", ".github/CODEOOWNERS"],
)
-s.move(templated_files, excludes=["docs/multiprocessing.rst", ".coveragerc"])
+python.py_samples(skip_readmes=True)
# Preserve system tests w/ GOOGLE_DISABLE_GRPC set (#133, PR #136)
-s.replace(
+assert 1 == s.replace(
"noxfile.py",
- """\
+ r"""\
@nox.session\(python=SYSTEM_TEST_PYTHON_VERSIONS\)
def system\(session\):
""",
@@ -73,7 +80,7 @@ def system(session, disable_grpc):
""",
)
-s.replace(
+assert 1 == s.replace(
"noxfile.py",
"""\
# Run py.test against the system tests.
@@ -83,62 +90,84 @@ def system(session, disable_grpc):
if disable_grpc:
env["GOOGLE_CLOUD_DISABLE_GRPC"] = "True"
- # Run py.test against the system tests.
+# Run py.test against the system tests.
""",
)
-s.replace(
+assert 1 == s.replace(
"noxfile.py",
- """session\.run\(
- "py\.test",
- "--quiet",
- f"--junitxml=system_\{session\.python\}_sponge_log\.xml",
- system_test_path,
- \*session\.posargs
- \)""",
- """session.run(
- "py.test",
- "--quiet",
- f"--junitxml=system_{session.python}_sponge_log.xml",
- system_test_path,
- env=env,
- *session.posargs
- )
+ """\
+ system_test_path,
+""",
+ """\
+ system_test_path,
+ env=env,
""",
)
-s.replace(
+assert 1 == s.replace(
"noxfile.py",
- """session\.run\(
- "py\.test",
- "--quiet",
- f"--junitxml=system_\{session\.python\}_sponge_log\.xml",
- system_test_folder_path,
- \*session\.posargs
- \)""",
- """session.run(
- "py.test",
- "--quiet",
- f"--junitxml=system_{session.python}_sponge_log.xml",
- system_test_folder_path,
- env=env,
- *session.posargs
- )
+ """\
+ system_test_folder_path,
+""",
+ """\
+ system_test_folder_path,
+ env=env,
""",
)
-s.shell.run(["nox", "-s", "blacken"], hide_output=False)
+# Add nox session to exercise doctests
+assert 1 == s.replace(
+ "noxfile.py",
+ r"""\
+ "blacken",
+ "docs",
+""",
+ """\
+ "blacken",
+ "docs",
+ "doctests",
+""",
+)
+
+assert 1 == s.replace(
+ "noxfile.py",
+ r"""\
+@nox.session\(python=DEFAULT_PYTHON_VERSION\)
+def docfx\(session\):
+""",
+ """\
+@nox.session(python="3.6")
+def doctests(session):
+ # Doctests run against Python 3.6 only.
+ # It is difficult to make doctests run against both Python 2 and Python 3
+ # because they test string output equivalence, which is difficult to
+ # make match (e.g. unicode literals starting with "u").
+
+ # Install all test dependencies, then install this package into the
+ # virtualenv's dist-packages.
+ session.install("mock", "pytest", "sphinx", "google-cloud-testutils")
+ session.install("-e", ".")
+
+ # Run py.test against the doctests.
+ session.run("py.test", "tests/doctests.py")
+
+
+@nox.session(python=DEFAULT_PYTHON_VERSION)
+def docfx(session):
+""",
+)
# Add documentation about creating indexes and populating data for system
# tests.
-num = s.replace(
+assert 1 == s.replace(
"CONTRIBUTING.rst",
- """\
+ r"""
\*\*\*\*\*\*\*\*\*\*\*\*\*
Test Coverage
\*\*\*\*\*\*\*\*\*\*\*\*\*
""",
- """\
+ """
- You'll need to create composite
`indexes <https://cloud.google.com/datastore/docs/tools/indexconfig>`__
with the ``gcloud`` command line
@@ -168,7 +197,7 @@ def system(session, disable_grpc):
*************
Test Coverage
*************
-""")
+""",
+)
-if num != 1:
- raise Exception("Required replacement not made.")
+s.shell.run(["nox", "-s", "blacken"], hide_output=False)
diff --git a/renovate.json b/renovate.json
index c0489556..c21036d3 100644
--- a/renovate.json
+++ b/renovate.json
@@ -1,6 +1,9 @@
{
"extends": [
- "config:base", ":preserveSemverRanges"
+ "config:base",
+ "group:all",
+ ":preserveSemverRanges",
+ ":disableDependencyDashboard"
],
"ignorePaths": [".pre-commit-config.yaml"],
"pip_requirements": {
diff --git a/scripts/readme-gen/templates/install_deps.tmpl.rst b/scripts/readme-gen/templates/install_deps.tmpl.rst
index a0406dba..275d6498 100644
--- a/scripts/readme-gen/templates/install_deps.tmpl.rst
+++ b/scripts/readme-gen/templates/install_deps.tmpl.rst
@@ -12,7 +12,7 @@ Install Dependencies
.. _Python Development Environment Setup Guide:
https://cloud.google.com/python/setup
-#. Create a virtualenv. Samples are compatible with Python 2.7 and 3.4+.
+#. Create a virtualenv. Samples are compatible with Python 3.6+.
.. code-block:: bash
diff --git a/setup.py b/setup.py
index 19fe7711..6550cea3 100644
--- a/setup.py
+++ b/setup.py
@@ -89,6 +89,7 @@
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
+ "Programming Language :: Python :: 3.10",
"Operating System :: OS Independent",
"Topic :: Internet",
"Topic :: Software Development :: Libraries :: Python Modules",
diff --git a/tests/doctests.py b/tests/doctests.py
index cc8d6a3a..32090561 100644
--- a/tests/doctests.py
+++ b/tests/doctests.py
@@ -21,6 +21,8 @@
SPHINX_CONF = """\
+root_doc = "contents"
+
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
@@ -33,6 +35,7 @@
.. automodule:: google.cloud.%s
:members:
+ :private-members:
"""
@@ -56,9 +59,7 @@ def _add_section(index, mod_name, file_obj):
content = SPHINX_SECTION_TEMPLATE % (index, mod_part)
file_obj.write(content)
- def _make_temp_docs(self):
- docs_dir = tempfile.mkdtemp(prefix="datastore-")
-
+ def _make_temp_docs(self, docs_dir):
conf_file = os.path.join(docs_dir, "conf.py")
with open(conf_file, "w") as file_obj:
@@ -66,29 +67,38 @@ def _make_temp_docs(self):
index_file = os.path.join(docs_dir, "contents.rst")
datastore_modules = self._submodules()
+
with open(index_file, "w") as file_obj:
self._add_section(0, "__init__", file_obj)
for index, datastore_module in enumerate(datastore_modules):
self._add_section(index + 1, datastore_module, file_obj)
- return docs_dir
-
def test_it(self):
from sphinx import application
- docs_dir = self._make_temp_docs()
- outdir = os.path.join(docs_dir, "doctest", "out")
- doctreedir = os.path.join(docs_dir, "doctest", "doctrees")
-
- app = application.Sphinx(
- srcdir=docs_dir,
- confdir=docs_dir,
- outdir=outdir,
- doctreedir=doctreedir,
- buildername="doctest",
- warningiserror=True,
- parallel=1,
- )
-
- app.build()
- self.assertEqual(app.statuscode, 0)
+ with tempfile.TemporaryDirectory(prefix="datastore-") as docs_dir:
+ self._make_temp_docs(docs_dir)
+ outdir = os.path.join(docs_dir, "doctest", "out")
+ doctreedir = os.path.join(docs_dir, "doctest", "doctrees")
+
+ app = application.Sphinx(
+ srcdir=docs_dir,
+ confdir=docs_dir,
+ outdir=outdir,
+ doctreedir=doctreedir,
+ buildername="doctest",
+ warningiserror=True,
+ parallel=1,
+ verbosity=1,
+ )
+
+ try:
+ app.build()
+ except Exception:
+ outfile = os.path.join(outdir, "output.txt")
+ with open(outfile, "r") as file_obj:
+ output = file_obj.read()
+ print(f"\n\nDoctest output\n--------------\n\n{output}")
+ raise
+ else:
+ self.assertEqual(app.statuscode, 0)
diff --git a/tests/system/_helpers.py b/tests/system/_helpers.py
new file mode 100644
index 00000000..e8b5cf1c
--- /dev/null
+++ b/tests/system/_helpers.py
@@ -0,0 +1,44 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+
+from google.cloud import datastore
+from google.cloud.datastore.client import DATASTORE_DATASET
+from test_utils.system import unique_resource_id
+
+EMULATOR_DATASET = os.getenv(DATASTORE_DATASET)
+
+
+def unique_id(prefix, separator="-"):
+ return f"{prefix}{unique_resource_id(separator)}"
+
+
+_SENTINEL = object()
+
+
+def clone_client(base_client, namespace=_SENTINEL):
+ if namespace is _SENTINEL:
+ namespace = base_client.namespace
+
+ kwargs = {}
+ if EMULATOR_DATASET is None:
+ kwargs["credentials"] = base_client._credentials
+
+ return datastore.Client(
+ project=base_client.project,
+ namespace=namespace,
+ _http=base_client._http,
+ **kwargs,
+ )
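The ``_SENTINEL = object()`` idiom in the new helper lets ``clone_client``
tell "namespace argument omitted" (inherit the base client's namespace) apart
from an explicit ``namespace=None`` (the default namespace, which the query
fixtures below rely on). A standalone illustration of the idiom:

    _MISSING = object()

    def describe(namespace=_MISSING):
        if namespace is _MISSING:   # argument omitted entirely
            return "inherit the caller's namespace"
        if namespace is None:       # None passed on purpose
            return "use the default namespace"
        return f"use namespace {namespace!r}"

    assert describe() == "inherit the caller's namespace"
    assert describe(None) == "use the default namespace"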
diff --git a/tests/system/conftest.py b/tests/system/conftest.py
new file mode 100644
index 00000000..61f8c1f0
--- /dev/null
+++ b/tests/system/conftest.py
@@ -0,0 +1,50 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pytest
+import requests
+
+from google.cloud import datastore
+from . import _helpers
+
+
+@pytest.fixture(scope="session")
+def in_emulator():
+ return _helpers.EMULATOR_DATASET is not None
+
+
+@pytest.fixture(scope="session")
+def test_namespace():
+ return _helpers.unique_id("ns")
+
+
+@pytest.fixture(scope="session")
+def datastore_client(test_namespace):
+ if _helpers.EMULATOR_DATASET is not None:
+ http = requests.Session() # Un-authorized.
+ return datastore.Client(
+ project=_helpers.EMULATOR_DATASET, namespace=test_namespace, _http=http,
+ )
+ else:
+ return datastore.Client(namespace=test_namespace)
+
+
+@pytest.fixture(scope="function")
+def entities_to_delete(datastore_client):
+ entities_to_delete = []
+
+ yield entities_to_delete
+
+ with datastore_client.transaction():
+ datastore_client.delete_multi(entities_to_delete)
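``entities_to_delete`` uses the pytest generator-fixture pattern: the test
receives the yielded list, and everything appended to it is deleted in a
single transaction during teardown. A hedged usage sketch mirroring how the
new test modules below consume it:

    from google.cloud import datastore

    def test_put_roundtrip(datastore_client, entities_to_delete):
        entity = datastore.Entity(datastore_client.key("SomeKind", 1234))
        entity["field"] = "value"
        datastore_client.put(entity)
        entities_to_delete.append(entity)  # cleaned up after the test

        assert datastore_client.get(entity.key) == entity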
diff --git a/tests/system/test_allocate_reserve_ids.py b/tests/system/test_allocate_reserve_ids.py
new file mode 100644
index 00000000..8c40538f
--- /dev/null
+++ b/tests/system/test_allocate_reserve_ids.py
@@ -0,0 +1,61 @@
+# Copyright 2011 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import warnings
+
+
+def test_client_allocate_ids(datastore_client):
+ num_ids = 10
+ allocated_keys = datastore_client.allocate_ids(
+ datastore_client.key("Kind"), num_ids,
+ )
+ assert len(allocated_keys) == num_ids
+
+ unique_ids = set()
+ for key in allocated_keys:
+ unique_ids.add(key.id)
+ assert key.name is None
+ assert key.id is not None
+
+ assert len(unique_ids) == num_ids
+
+
+def test_client_reserve_ids_sequential(datastore_client):
+ num_ids = 10
+ key = datastore_client.key("Kind", 1234)
+
+ # Smoke test to make sure it doesn't blow up. No return value or
+ # verifiable side effect to verify.
+ datastore_client.reserve_ids_sequential(key, num_ids)
+
+
+def test_client_reserve_ids_deprecated(datastore_client):
+ num_ids = 10
+ key = datastore_client.key("Kind", 1234)
+
+ with warnings.catch_warnings(record=True) as warned:
+ datastore_client.reserve_ids(key, num_ids)
+
+ assert len(warned) == 1
+ assert warned[0].category is DeprecationWarning
+ assert "reserve_ids_sequential" in str(warned[0].message)
+
+
+def test_client_reserve_ids_multi(datastore_client):
+ key1 = datastore_client.key("Kind", 1234)
+ key2 = datastore_client.key("Kind", 1235)
+
+ # Smoke test to make sure it doesn't blow up. No return value or
+ # verifiable side effect to verify.
+ datastore_client.reserve_ids_multi([key1, key2])
diff --git a/tests/system/test_put.py b/tests/system/test_put.py
new file mode 100644
index 00000000..5e884cf3
--- /dev/null
+++ b/tests/system/test_put.py
@@ -0,0 +1,163 @@
+# Copyright 2011 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import datetime
+
+import pytest
+
+from google.cloud._helpers import UTC
+from google.cloud import datastore
+from google.cloud.datastore.helpers import GeoPoint
+
+from . import _helpers
+
+
+def parent_key(datastore_client):
+ return datastore_client.key("Blog", "PizzaMan")
+
+
+def _get_post(datastore_client, id_or_name=None, post_content=None):
+ post_content = post_content or {
+ "title": u"How to make the perfect pizza in your grill",
+ "tags": [u"pizza", u"grill"],
+ "publishedAt": datetime.datetime(2001, 1, 1, tzinfo=UTC),
+ "author": u"Silvano",
+ "isDraft": False,
+ "wordCount": 400,
+ "rating": 5.0,
+ }
+ # Create an entity with the given content.
+ # NOTE: Using a parent to ensure consistency for the query
+ # in `test_client_query_w_empty_kind`.
+ key = datastore_client.key("Post", parent=parent_key(datastore_client))
+ entity = datastore.Entity(key=key)
+ entity.update(post_content)
+
+ # Update the entity key.
+ if id_or_name is not None:
+ entity.key = entity.key.completed_key(id_or_name)
+
+ return entity
+
+
+@pytest.mark.parametrize(
+ "name,key_id", [(None, None), ("post1", None), (None, 123456789)]
+)
+def test_client_put(datastore_client, entities_to_delete, name, key_id):
+ entity = _get_post(datastore_client, id_or_name=(name or key_id))
+ datastore_client.put(entity)
+ entities_to_delete.append(entity)
+
+ if name is not None:
+ assert entity.key.name == name
+ if key_id is not None:
+ assert entity.key.id == key_id
+
+ retrieved_entity = datastore_client.get(entity.key)
+ # Check the given and retrieved entities are the same.
+ assert retrieved_entity == entity
+
+
+def test_client_put_w_multiple_in_txn(datastore_client, entities_to_delete):
+ with datastore_client.transaction() as xact:
+ entity1 = _get_post(datastore_client)
+ xact.put(entity1)
+ # Register entity to be deleted.
+ entities_to_delete.append(entity1)
+
+ second_post_content = {
+ "title": u"How to make the perfect homemade pasta",
+ "tags": [u"pasta", u"homemade"],
+ "publishedAt": datetime.datetime(2001, 1, 1),
+ "author": u"Silvano",
+ "isDraft": False,
+ "wordCount": 450,
+ "rating": 4.5,
+ }
+ entity2 = _get_post(datastore_client, post_content=second_post_content,)
+ xact.put(entity2)
+ # Register entity to be deleted.
+ entities_to_delete.append(entity2)
+
+ keys = [entity1.key, entity2.key]
+ matches = datastore_client.get_multi(keys)
+ assert len(matches) == 2
+
+
+def test_client_query_w_empty_kind(datastore_client):
+ query = datastore_client.query(kind="Post")
+ query.ancestor = parent_key(datastore_client)
+ posts = query.fetch(limit=2)
+ assert list(posts) == []
+
+
+def test_client_put_w_all_value_types(datastore_client, entities_to_delete):
+ key = datastore_client.key("TestPanObject", 1234)
+ entity = datastore.Entity(key=key)
+ entity["timestamp"] = datetime.datetime(2014, 9, 9, tzinfo=UTC)
+ key_stored = datastore_client.key("SavedKey", "right-here")
+ entity["key"] = key_stored
+ entity["truthy"] = True
+ entity["float"] = 2.718281828
+ entity["int"] = 3735928559
+ entity["words"] = u"foo"
+ entity["blob"] = b"seekretz"
+ entity_stored = datastore.Entity(key=key_stored)
+ entity_stored["hi"] = "bye"
+ entity["nested"] = entity_stored
+ entity["items"] = [1, 2, 3]
+ entity["geo"] = GeoPoint(1.0, 2.0)
+ entity["nothing_here"] = None
+
+ # Store the entity.
+ datastore_client.put(entity)
+ entities_to_delete.append(entity)
+
+ # Check the original and retrieved entities are the same.
+ retrieved_entity = datastore_client.get(entity.key)
+ assert retrieved_entity == entity
+
+
+def test_client_put_w_entity_w_self_reference(datastore_client, entities_to_delete):
+ parent_key = datastore_client.key("Residence", "NewYork")
+ key = datastore_client.key("Person", "name", parent=parent_key)
+ entity = datastore.Entity(key=key)
+ entity["fullName"] = u"Full name"
+ entity["linkedTo"] = key # Self reference.
+
+ datastore_client.put(entity)
+ entities_to_delete.append(entity)
+
+ query = datastore_client.query(kind="Person")
+ # Adding ancestor to ensure consistency.
+ query.ancestor = parent_key
+ query.add_filter("linkedTo", "=", key)
+
+ stored_persons = list(query.fetch(limit=2))
+ assert stored_persons == [entity]
+
+
+def test_client_put_w_empty_array(datastore_client, entities_to_delete):
+ local_client = _helpers.clone_client(datastore_client)
+
+ key = local_client.key("EmptyArray", 1234)
+ local_client = datastore.Client()
+ entity = datastore.Entity(key=key)
+ entity["children"] = []
+ local_client.put(entity)
+ entities_to_delete.append(entity)
+
+ retrieved = local_client.get(entity.key)
+
+ assert entity["children"] == retrieved["children"]
diff --git a/tests/system/test_query.py b/tests/system/test_query.py
new file mode 100644
index 00000000..c5921bc9
--- /dev/null
+++ b/tests/system/test_query.py
@@ -0,0 +1,344 @@
+# Copyright 2011 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pytest
+
+from google.api_core import exceptions
+from test_utils.retry import RetryErrors
+
+from .utils import clear_datastore
+from .utils import populate_datastore
+from . import _helpers
+
+
+retry_503 = RetryErrors(exceptions.ServiceUnavailable)
+
+
+def _make_iterator(query, **kw):
+ # Do retry for errors raised during initial API call
+ return retry_503(query.fetch)(**kw)
+
+
+def _pull_iterator(query, **kw):
+ return list(_make_iterator(query, **kw))
+
+
+def _do_fetch(query, **kw):
+ # Do retry for errors raised during iteration
+ return retry_503(_pull_iterator)(query, **kw)
+
+
+@pytest.fixture(scope="session")
+def query_client(datastore_client):
+ return _helpers.clone_client(datastore_client, namespace=None)
+
+
+@pytest.fixture(scope="session")
+def ancestor_key(query_client, in_emulator):
+
+ # In the emulator, re-populating the datastore is cheap.
+ if in_emulator:
+ populate_datastore.add_characters(client=query_client)
+
+ ancestor_key = query_client.key(*populate_datastore.ANCESTOR)
+
+ yield ancestor_key
+
+ # In the emulator, destroy the query entities.
+ if in_emulator:
+ clear_datastore.remove_all_entities(client=query_client)
+
+
+def _make_ancestor_query(query_client, ancestor_key):
+ return query_client.query(kind="Character", ancestor=ancestor_key)
+
+
+@pytest.fixture(scope="function")
+def ancestor_query(query_client, ancestor_key):
+ return _make_ancestor_query(query_client, ancestor_key)
+
+
+def test_query_w_ancestor(ancestor_query):
+ query = ancestor_query
+ expected_matches = 8
+
+ # We expect 8, but allow the query to get 1 extra.
+ entities = _do_fetch(query, limit=expected_matches + 1)
+
+ assert len(entities) == expected_matches
+
+
+def test_query_w_limit_paging(ancestor_query):
+ query = ancestor_query
+ limit = 5
+
+ # Fetch characters.
+ iterator = query.fetch(limit=limit)
+ page = next(iterator.pages)
+ character_entities = list(page)
+ cursor = iterator.next_page_token
+ assert len(character_entities) == limit
+
+ # Check cursor after fetch.
+ assert cursor is not None
+
+ # Fetch remaining characters.
+ new_character_entities = _do_fetch(query, start_cursor=cursor)
+ characters_remaining = len(populate_datastore.CHARACTERS) - limit
+ assert len(new_character_entities) == characters_remaining
+
+
+def test_query_w_simple_filter(ancestor_query):
+ query = ancestor_query
+ query.add_filter("appearances", ">=", 20)
+ expected_matches = 6
+
+ # We expect 6, but allow the query to get 1 extra.
+ entities = _do_fetch(query, limit=expected_matches + 1)
+
+ assert len(entities) == expected_matches
+
+
+def test_query_w_multiple_filters(ancestor_query):
+ query = ancestor_query
+ query.add_filter("appearances", ">=", 26)
+ query = query.add_filter("family", "=", "Stark")
+ expected_matches = 4
+
+ # We expect 4, but allow the query to get 1 extra.
+ entities = _do_fetch(query, limit=expected_matches + 1)
+
+ assert len(entities) == expected_matches
+
+
+def test_query_key_filter(query_client, ancestor_query):
+ # Use the client for this test instead of the global.
+ query = ancestor_query
+ rickard_key = query_client.key(*populate_datastore.RICKARD)
+ query.key_filter(rickard_key)
+ expected_matches = 1
+
+ # We expect 1, but allow the query to get 1 extra.
+ entities = _do_fetch(query, limit=expected_matches + 1)
+
+ assert len(entities) == expected_matches
+
+
+def test_query_w_order(ancestor_query):
+ query = ancestor_query
+ query.order = "appearances"
+ expected_matches = 8
+
+ # We expect 8, but allow the query to get 1 extra.
+ entities = _do_fetch(query, limit=expected_matches + 1)
+
+ assert len(entities) == expected_matches
+
+ # Actually check the ordered data returned.
+ assert entities[0]["name"] == populate_datastore.CHARACTERS[0]["name"]
+ assert entities[7]["name"] == populate_datastore.CHARACTERS[3]["name"]
+
+
+def test_query_w_projection(ancestor_query):
+ filtered_query = ancestor_query
+ filtered_query.projection = ["name", "family"]
+ filtered_query.order = ["name", "family"]
+
+ # NOTE: There are 9 responses because of Catelyn. She has both
+ # Stark and Tully as her families, hence occurs twice in
+ # the results.
+ expected_matches = 9
+
+ # We expect 9, but allow the query to get 1 extra.
+ entities = list(filtered_query.fetch(limit=expected_matches + 1))
+ assert len(entities) == expected_matches
+
+ arya_entity = entities[0]
+ catelyn_stark_entity = entities[2]
+ catelyn_tully_entity = entities[3]
+ sansa_entity = entities[8]
+
+ assert dict(arya_entity) == {"name": "Arya", "family": "Stark"}
+
+ # Check both Catelyn keys are the same.
+ assert catelyn_stark_entity.key == catelyn_tully_entity.key
+ assert dict(catelyn_stark_entity) == {"name": "Catelyn", "family": "Stark"}
+ assert dict(catelyn_tully_entity) == {"name": "Catelyn", "family": "Tully"}
+
+ assert dict(sansa_entity) == {"name": "Sansa", "family": "Stark"}
+
+
+def test_query_w_paginate_simple_uuid_keys(query_client):
+ # See issue #4264
+ page_query = query_client.query(kind="uuid_key")
+ iterator = page_query.fetch()
+ seen = set()
+ page_count = 0
+
+ for page in iterator.pages:
+ page_count += 1
+ for entity in page:
+ uuid_str = entity.key.name
+ assert uuid_str not in seen
+ seen.add(uuid_str)
+
+ assert page_count > 1
+
+
+def test_query_paginate_simple_timestamp_keys(query_client):
+ # See issue #4264
+ page_query = query_client.query(kind="timestamp_key")
+ iterator = page_query.fetch()
+ seen = set()
+ page_count = 0
+
+ for page in iterator.pages:
+ page_count += 1
+ for entity in page:
+ timestamp = entity.key.id
+ assert timestamp not in seen
+ seen.add(timestamp)
+
+ assert page_count > 1
+
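The two regression tests above page over kinds whose keys (UUID strings and timestamps) once tripped cursor handling (see issue #4264). Written out with explicit cursors, the pattern they exercise looks roughly like the sketch below; iterate_pages and the page size of 100 are illustrative, not part of the suite.

def iterate_pages(client, kind, page_size=100):
    # Walk a kind page by page, resuming each fetch from the previous
    # page's cursor, and yield one list of entities per page.
    cursor = None
    while True:
        iterator = client.query(kind=kind).fetch(start_cursor=cursor, limit=page_size)
        page = next(iterator.pages)
        entities = list(page)
        if not entities:
            return
        yield entities
        cursor = iterator.next_page_token
        if cursor is None:
            return
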
+
+def test_query_w_offset_w_timestamp_keys(query_client):
+ # See issue #4675
+ max_all = 10000
+ offset = 1
+ max_offset = max_all - offset
+ query = query_client.query(kind="timestamp_key")
+
+ all_w_limit = _do_fetch(query, limit=max_all)
+ assert len(all_w_limit) == max_all
+
+ offset_w_limit = _do_fetch(query, offset=offset, limit=max_offset)
+ assert offset_w_limit == all_w_limit[offset:]
+
+
+def test_query_paginate_with_offset(ancestor_query):
+ page_query = ancestor_query
+ page_query.order = "appearances"
+ offset = 2
+ limit = 3
+
+ iterator = page_query.fetch(limit=limit, offset=offset)
+
+ # Fetch characters.
+ page = next(iterator.pages)
+ entities = list(page)
+ assert len(entities) == limit
+ assert entities[0]["name"] == "Robb"
+ assert entities[1]["name"] == "Bran"
+ assert entities[2]["name"] == "Catelyn"
+
+ cursor = iterator.next_page_token
+
+ # Fetch next set of characters.
+ new_iterator = page_query.fetch(limit=limit, offset=0, start_cursor=cursor)
+
+ entities = list(new_iterator)
+ assert len(entities) == limit
+ assert entities[0]["name"] == "Sansa"
+ assert entities[1]["name"] == "Jon Snow"
+ assert entities[2]["name"] == "Arya"
+
+
+def test_query_paginate_with_start_cursor(query_client, ancestor_key):
+    # Don't use the fixture, because we need a fresh copy of the query later.
+ page_query = _make_ancestor_query(query_client, ancestor_key)
+ page_query.order = "appearances"
+ limit = 3
+ offset = 2
+
+ iterator = page_query.fetch(limit=limit, offset=offset)
+
+ # Fetch characters.
+ page = next(iterator.pages)
+ entities = list(page)
+ assert len(entities) == limit
+
+ cursor = iterator.next_page_token
+
+ # Use cursor to create a fresh query.
+ fresh_query = _make_ancestor_query(query_client, ancestor_key)
+ fresh_query.order = "appearances"
+
+ new_entities = list(fresh_query.fetch(start_cursor=cursor, limit=limit))
+
+ characters_remaining = len(populate_datastore.CHARACTERS) - limit - offset
+ assert len(new_entities) == characters_remaining
+ assert new_entities[0]["name"] == "Sansa"
+ assert new_entities[2]["name"] == "Arya"
+
+
+def test_query_distinct_on(ancestor_query):
+ query = ancestor_query
+ query.distinct_on = ["alive"]
+ expected_matches = 2
+
+ # We expect 2, but allow the query to get 1 extra.
+ entities = _do_fetch(query, limit=expected_matches + 1)
+
+ assert len(entities) == expected_matches
+ assert entities[0]["name"] == "Catelyn"
+ assert entities[1]["name"] == "Arya"
+
+
+@pytest.fixture(scope="session")
+def large_query_client(datastore_client):
+ large_query_client = _helpers.clone_client(
+ datastore_client, namespace=populate_datastore.LARGE_CHARACTER_NAMESPACE,
+ )
+ # Populate the datastore if necessary.
+ populate_datastore.add_large_character_entities(client=large_query_client)
+
+ return large_query_client
+
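_helpers.clone_client lives in the new tests/system/_helpers module, which is outside this hunk. Based on the call above and on the module-level clone_client being deleted from test_system.py below, it plausibly looks like the following sketch; the namespace default is an assumption.

import os

from google.cloud import datastore
from google.cloud.datastore.client import DATASTORE_DATASET


def clone_client(client, namespace=None):
    # Re-create a client against the same project and transport, optionally
    # overriding the namespace. Against the emulator (DATASTORE_DATASET set)
    # no credentials are passed, mirroring the deleted helper below.
    if namespace is None:
        namespace = client.namespace
    kwargs = {"project": client.project, "namespace": namespace, "_http": client._http}
    if os.getenv(DATASTORE_DATASET) is None:
        kwargs["credentials"] = client._credentials
    return datastore.Client(**kwargs)
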
+
+@pytest.fixture(scope="function")
+def large_query(large_query_client):
+    # Build the query with the namespace-scoped client.
+ return large_query_client.query(
+ kind=populate_datastore.LARGE_CHARACTER_KIND,
+ namespace=populate_datastore.LARGE_CHARACTER_NAMESPACE,
+ )
+
+
+@pytest.mark.parametrize(
+    "limit,offset,expected",
+    [
+        # No limit, no offset: all matching objects are returned.
+        (None, None, populate_datastore.LARGE_CHARACTER_TOTAL_OBJECTS),
+        # No limit, offset provided: the objects after the offset are returned.
+        (None, 900, populate_datastore.LARGE_CHARACTER_TOTAL_OBJECTS - 900),
+        # Limit smaller than the remainder: exactly `limit` objects are returned.
+        (200, 1100, 200),
+        # Offset near the end: only 50 objects remain, despite the larger limit.
+        (100, populate_datastore.LARGE_CHARACTER_TOTAL_OBJECTS - 50, 50),
+        # Offset beyond the total object count: no objects are returned.
+        (200, populate_datastore.LARGE_CHARACTER_TOTAL_OBJECTS + 1000, 0),
+    ],
+)
+def test_large_query(large_query, limit, offset, expected):
+ page_query = large_query
+ page_query.add_filter("family", "=", "Stark")
+ page_query.add_filter("alive", "=", False)
+
+ iterator = page_query.fetch(limit=limit, offset=offset)
+
+    entities = list(iterator)
+ assert len(entities) == expected
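The expected counts in the parametrize table above follow from plain limit/offset arithmetic over the 2500 matching objects. A quick self-check of all five cases:

def expected_count(total, limit, offset):
    # Entities remaining after the offset, capped by the limit if one is set.
    remaining = max(total - (offset or 0), 0)
    return remaining if limit is None else min(limit, remaining)

assert expected_count(2500, None, None) == 2500
assert expected_count(2500, None, 900) == 1600
assert expected_count(2500, 200, 1100) == 200
assert expected_count(2500, 100, 2450) == 50
assert expected_count(2500, 200, 3500) == 0
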
diff --git a/tests/system/test_system.py b/tests/system/test_system.py
deleted file mode 100644
index a91b99ae..00000000
--- a/tests/system/test_system.py
+++ /dev/null
@@ -1,636 +0,0 @@
-# Copyright 2014 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import datetime
-import os
-import unittest
-import warnings
-
-import requests
-
-from google.cloud._helpers import UTC
-from google.cloud import datastore
-from google.cloud.datastore.helpers import GeoPoint
-from google.cloud.datastore.client import DATASTORE_DATASET
-from google.cloud.exceptions import Conflict
-
-from test_utils.system import unique_resource_id
-
-from tests.system.utils import clear_datastore
-from tests.system.utils import populate_datastore
-
-
-class Config(object):
- """Run-time configuration to be modified at set-up.
-
- This is a mutable stand-in to allow test set-up to modify
- global state.
- """
-
- CLIENT = None
- TO_DELETE = []
-
-
-def clone_client(client):
- emulator_dataset = os.getenv(DATASTORE_DATASET)
-
- if emulator_dataset is None:
- return datastore.Client(
- project=client.project,
- namespace=client.namespace,
- credentials=client._credentials,
- _http=client._http,
- )
- else:
- return datastore.Client(
- project=client.project, namespace=client.namespace, _http=client._http,
- )
-
-
-def setUpModule():
- emulator_dataset = os.getenv(DATASTORE_DATASET)
- # Isolated namespace so concurrent test runs don't collide.
- test_namespace = "ns" + unique_resource_id()
- if emulator_dataset is None:
- Config.CLIENT = datastore.Client(namespace=test_namespace)
- else:
- http = requests.Session() # Un-authorized.
- Config.CLIENT = datastore.Client(
- project=emulator_dataset, namespace=test_namespace, _http=http,
- )
-
-
-def tearDownModule():
- with Config.CLIENT.transaction():
- Config.CLIENT.delete_multi(Config.TO_DELETE)
-
-
-class TestDatastore(unittest.TestCase):
- def setUp(self):
- self.case_entities_to_delete = []
-
- def tearDown(self):
- with Config.CLIENT.transaction():
- Config.CLIENT.delete_multi(self.case_entities_to_delete)
-
-
-class TestDatastoreAllocateIDs(TestDatastore):
- def test_allocate_ids(self):
- num_ids = 10
- allocated_keys = Config.CLIENT.allocate_ids(Config.CLIENT.key("Kind"), num_ids)
- self.assertEqual(len(allocated_keys), num_ids)
-
- unique_ids = set()
- for key in allocated_keys:
- unique_ids.add(key.id)
- self.assertIsNone(key.name)
- self.assertNotEqual(key.id, None)
-
- self.assertEqual(len(unique_ids), num_ids)
-
-
-class TestDatastoreReserveIDs(TestDatastore):
- def test_reserve_ids_sequential(self):
- # Smoke test to make sure it doesn't blow up. No return value or
- # verifiable side effect to verify.
- num_ids = 10
- Config.CLIENT.reserve_ids_sequential(Config.CLIENT.key("Kind", 1234), num_ids)
-
- def test_reserve_ids(self):
- with warnings.catch_warnings(record=True) as warned:
- num_ids = 10
- Config.CLIENT.reserve_ids(Config.CLIENT.key("Kind", 1234), num_ids)
-
- warned = [
- warning
- for warning in warned
- if "reserve_ids_sequential" in str(warning.message)
- ]
- assert len(warned) == 1
-
- def test_reserve_ids_multi(self):
- # Smoke test to make sure it doesn't blow up. No return value or
- # verifiable side effect to verify.
- keys = [Config.CLIENT.key("KIND", 1234), Config.CLIENT.key("KIND", 1235)]
- Config.CLIENT.reserve_ids_multi(keys)
-
-
-class TestDatastoreSave(TestDatastore):
- @classmethod
- def setUpClass(cls):
- cls.PARENT = Config.CLIENT.key("Blog", "PizzaMan")
-
- def _get_post(self, id_or_name=None, post_content=None):
- post_content = post_content or {
- "title": u"How to make the perfect pizza in your grill",
- "tags": [u"pizza", u"grill"],
- "publishedAt": datetime.datetime(2001, 1, 1, tzinfo=UTC),
- "author": u"Silvano",
- "isDraft": False,
- "wordCount": 400,
- "rating": 5.0,
- }
- # Create an entity with the given content.
- # NOTE: Using a parent to ensure consistency for query
- # in `test_empty_kind`.
- key = Config.CLIENT.key("Post", parent=self.PARENT)
- entity = datastore.Entity(key=key)
- entity.update(post_content)
-
- # Update the entity key.
- if id_or_name is not None:
- entity.key = entity.key.completed_key(id_or_name)
-
- return entity
-
- def _generic_test_post(self, name=None, key_id=None):
- entity = self._get_post(id_or_name=(name or key_id))
- Config.CLIENT.put(entity)
-
- # Register entity to be deleted.
- self.case_entities_to_delete.append(entity)
-
- if name is not None:
- self.assertEqual(entity.key.name, name)
- if key_id is not None:
- self.assertEqual(entity.key.id, key_id)
- retrieved_entity = Config.CLIENT.get(entity.key)
- # Check the given and retrieved are the the same.
- self.assertEqual(retrieved_entity, entity)
-
- def test_post_with_name(self):
- self._generic_test_post(name="post1")
-
- def test_post_with_id(self):
- self._generic_test_post(key_id=123456789)
-
- def test_post_with_generated_id(self):
- self._generic_test_post()
-
- def test_save_multiple(self):
- with Config.CLIENT.transaction() as xact:
- entity1 = self._get_post()
- xact.put(entity1)
- # Register entity to be deleted.
- self.case_entities_to_delete.append(entity1)
-
- second_post_content = {
- "title": u"How to make the perfect homemade pasta",
- "tags": [u"pasta", u"homemade"],
- "publishedAt": datetime.datetime(2001, 1, 1),
- "author": u"Silvano",
- "isDraft": False,
- "wordCount": 450,
- "rating": 4.5,
- }
- entity2 = self._get_post(post_content=second_post_content)
- xact.put(entity2)
- # Register entity to be deleted.
- self.case_entities_to_delete.append(entity2)
-
- keys = [entity1.key, entity2.key]
- matches = Config.CLIENT.get_multi(keys)
- self.assertEqual(len(matches), 2)
-
- def test_empty_kind(self):
- query = Config.CLIENT.query(kind="Post")
- query.ancestor = self.PARENT
- posts = list(query.fetch(limit=2))
- self.assertEqual(posts, [])
-
- def test_all_value_types(self):
- key = Config.CLIENT.key("TestPanObject", 1234)
- entity = datastore.Entity(key=key)
- entity["timestamp"] = datetime.datetime(2014, 9, 9, tzinfo=UTC)
- key_stored = Config.CLIENT.key("SavedKey", "right-here")
- entity["key"] = key_stored
- entity["truthy"] = True
- entity["float"] = 2.718281828
- entity["int"] = 3735928559
- entity["words"] = u"foo"
- entity["blob"] = b"seekretz"
- entity_stored = datastore.Entity(key=key_stored)
- entity_stored["hi"] = "bye"
- entity["nested"] = entity_stored
- entity["items"] = [1, 2, 3]
- entity["geo"] = GeoPoint(1.0, 2.0)
- entity["nothing_here"] = None
-
- # Store the entity.
- self.case_entities_to_delete.append(entity)
- Config.CLIENT.put(entity)
-
- # Check the original and retrieved are the the same.
- retrieved_entity = Config.CLIENT.get(entity.key)
- self.assertEqual(retrieved_entity, entity)
-
-
-class TestDatastoreSaveKeys(TestDatastore):
- def test_save_key_self_reference(self):
- parent_key = Config.CLIENT.key("Residence", "NewYork")
- key = Config.CLIENT.key("Person", "name", parent=parent_key)
- entity = datastore.Entity(key=key)
- entity["fullName"] = u"Full name"
- entity["linkedTo"] = key # Self reference.
-
- Config.CLIENT.put(entity)
- self.case_entities_to_delete.append(entity)
-
- query = Config.CLIENT.query(kind="Person")
- # Adding ancestor to ensure consistency.
- query.ancestor = parent_key
- query.add_filter("linkedTo", "=", key)
-
- stored_persons = list(query.fetch(limit=2))
- self.assertEqual(stored_persons, [entity])
-
-
-class TestDatastoreQuery(TestDatastore):
- @classmethod
- def setUpClass(cls):
- cls.CLIENT = clone_client(Config.CLIENT)
- # Remove the namespace from the cloned client, since these
- # query tests rely on the entities to be already stored and indexed,
- # hence ``test_namespace`` set at runtime can't be used.
- cls.CLIENT.namespace = None
-
- # In the emulator, re-populating the datastore is cheap.
- if os.getenv(DATASTORE_DATASET) is not None:
- # Populate the datastore with the cloned client.
- populate_datastore.add_characters(client=cls.CLIENT)
-
- cls.CHARACTERS = populate_datastore.CHARACTERS
- # Use the client for this test instead of the global.
- cls.ANCESTOR_KEY = cls.CLIENT.key(*populate_datastore.ANCESTOR)
-
- @classmethod
- def tearDownClass(cls):
- # In the emulator, destroy the query entities.
- if os.getenv(DATASTORE_DATASET) is not None:
- # Use the client for this test instead of the global.
- clear_datastore.remove_all_entities(client=cls.CLIENT)
-
- def _base_query(self):
- # Use the client for this test instead of the global.
- return self.CLIENT.query(kind="Character", ancestor=self.ANCESTOR_KEY)
-
- def test_limit_queries(self):
- limit = 5
- query = self._base_query()
-
- # Fetch characters.
- iterator = query.fetch(limit=limit)
- page = next(iterator.pages)
- character_entities = list(page)
- cursor = iterator.next_page_token
- self.assertEqual(len(character_entities), limit)
-
- # Check cursor after fetch.
- self.assertIsNotNone(cursor)
-
- # Fetch remaining characters.
- new_character_entities = list(query.fetch(start_cursor=cursor))
- characters_remaining = len(self.CHARACTERS) - limit
- self.assertEqual(len(new_character_entities), characters_remaining)
-
- def test_query_simple_filter(self):
- query = self._base_query()
- query.add_filter("appearances", ">=", 20)
- expected_matches = 6
- # We expect 6, but allow the query to get 1 extra.
- entities = list(query.fetch(limit=expected_matches + 1))
- self.assertEqual(len(entities), expected_matches)
-
- def test_query_multiple_filters(self):
- query = self._base_query()
- query.add_filter("appearances", ">=", 26)
- query.add_filter("family", "=", "Stark")
- expected_matches = 4
- # We expect 4, but allow the query to get 1 extra.
- entities = list(query.fetch(limit=expected_matches + 1))
- self.assertEqual(len(entities), expected_matches)
-
- def test_ancestor_query(self):
- filtered_query = self._base_query()
-
- expected_matches = 8
- # We expect 8, but allow the query to get 1 extra.
- entities = list(filtered_query.fetch(limit=expected_matches + 1))
- self.assertEqual(len(entities), expected_matches)
-
- def test_query_key_filter(self):
- # Use the client for this test instead of the global.
- rickard_key = self.CLIENT.key(*populate_datastore.RICKARD)
-
- query = self._base_query()
- query.key_filter(rickard_key)
- expected_matches = 1
- # We expect 1, but allow the query to get 1 extra.
- entities = list(query.fetch(limit=expected_matches + 1))
- self.assertEqual(len(entities), expected_matches)
-
- def test_ordered_query(self):
- query = self._base_query()
- query.order = "appearances"
- expected_matches = 8
- # We expect 8, but allow the query to get 1 extra.
- entities = list(query.fetch(limit=expected_matches + 1))
- self.assertEqual(len(entities), expected_matches)
-
- # Actually check the ordered data returned.
- self.assertEqual(entities[0]["name"], self.CHARACTERS[0]["name"])
- self.assertEqual(entities[7]["name"], self.CHARACTERS[3]["name"])
-
- def test_projection_query(self):
- filtered_query = self._base_query()
- filtered_query.projection = ["name", "family"]
- filtered_query.order = ["name", "family"]
-
- # NOTE: There are 9 responses because of Catelyn. She has both
- # Stark and Tully as her families, hence occurs twice in
- # the results.
- expected_matches = 9
- # We expect 9, but allow the query to get 1 extra.
- entities = list(filtered_query.fetch(limit=expected_matches + 1))
- self.assertEqual(len(entities), expected_matches)
-
- arya_entity = entities[0]
- catelyn_tully_entity = entities[3]
- sansa_entity = entities[8]
-
- arya_dict = dict(arya_entity)
- self.assertEqual(arya_dict, {"name": "Arya", "family": "Stark"})
-
- catelyn_stark_entity = entities[2]
- catelyn_stark_dict = dict(catelyn_stark_entity)
- self.assertEqual(catelyn_stark_dict, {"name": "Catelyn", "family": "Stark"})
-
- catelyn_tully_dict = dict(catelyn_tully_entity)
- self.assertEqual(catelyn_tully_dict, {"name": "Catelyn", "family": "Tully"})
-
- # Check both Catelyn keys are the same.
- self.assertEqual(catelyn_stark_entity.key, catelyn_tully_entity.key)
-
- sansa_dict = dict(sansa_entity)
- self.assertEqual(sansa_dict, {"name": "Sansa", "family": "Stark"})
-
- def test_query_paginate_simple_uuid_keys(self):
-
- # See issue #4264
- page_query = self.CLIENT.query(kind="uuid_key")
- iterator = page_query.fetch()
-
- seen = set()
- page_count = 0
- for page in iterator.pages:
- page_count += 1
- for entity in page:
- uuid_str = entity.key.name
- self.assertNotIn(uuid_str, seen, uuid_str)
- seen.add(uuid_str)
-
- self.assertTrue(page_count > 1)
-
- def test_query_paginate_simple_timestamp_keys(self):
-
- # See issue #4264
- page_query = self.CLIENT.query(kind="timestamp_key")
- iterator = page_query.fetch()
-
- seen = set()
- page_count = 0
- for page in iterator.pages:
- page_count += 1
- for entity in page:
- timestamp = entity.key.id
- self.assertNotIn(timestamp, seen, timestamp)
- seen.add(timestamp)
-
- self.assertTrue(page_count > 1)
-
- def test_query_offset_timestamp_keys(self):
- # See issue #4675
- max_all = 10000
- offset = 1
- max_offset = max_all - offset
- query = self.CLIENT.query(kind="timestamp_key")
- all_w_limit = list(query.fetch(limit=max_all))
- self.assertEqual(len(all_w_limit), max_all)
-
- offset_w_limit = list(query.fetch(offset=offset, limit=max_offset))
- self.assertEqual(offset_w_limit, all_w_limit[offset:])
-
- def test_query_paginate_with_offset(self):
- page_query = self._base_query()
- page_query.order = "appearances"
- offset = 2
- limit = 3
- iterator = page_query.fetch(limit=limit, offset=offset)
-
- # Fetch characters.
- page = next(iterator.pages)
- entities = list(page)
- cursor = iterator.next_page_token
- self.assertEqual(len(entities), limit)
- self.assertEqual(entities[0]["name"], "Robb")
- self.assertEqual(entities[1]["name"], "Bran")
- self.assertEqual(entities[2]["name"], "Catelyn")
-
- # Fetch next set of characters.
- new_iterator = page_query.fetch(limit=limit, offset=0, start_cursor=cursor)
- entities = list(new_iterator)
- self.assertEqual(len(entities), limit)
- self.assertEqual(entities[0]["name"], "Sansa")
- self.assertEqual(entities[1]["name"], "Jon Snow")
- self.assertEqual(entities[2]["name"], "Arya")
-
- def test_query_paginate_with_start_cursor(self):
- page_query = self._base_query()
- page_query.order = "appearances"
- limit = 3
- offset = 2
- iterator = page_query.fetch(limit=limit, offset=offset)
-
- # Fetch characters.
- page = next(iterator.pages)
- entities = list(page)
- cursor = iterator.next_page_token
- self.assertEqual(len(entities), limit)
-
- # Use cursor to create a fresh query.
- fresh_query = self._base_query()
- fresh_query.order = "appearances"
-
- new_entities = list(fresh_query.fetch(start_cursor=cursor, limit=limit))
- characters_remaining = len(self.CHARACTERS) - limit - offset
- self.assertEqual(len(new_entities), characters_remaining)
- self.assertEqual(new_entities[0]["name"], "Sansa")
- self.assertEqual(new_entities[2]["name"], "Arya")
-
- def test_query_distinct_on(self):
- query = self._base_query()
- query.distinct_on = ["alive"]
-
- expected_matches = 2
- # We expect 2, but allow the query to get 1 extra.
- entities = list(query.fetch(limit=expected_matches + 1))
- self.assertEqual(len(entities), expected_matches)
-
- self.assertEqual(entities[0]["name"], "Catelyn")
- self.assertEqual(entities[1]["name"], "Arya")
-
-
-class TestDatastoreQueryOffsets(TestDatastore):
- TOTAL_OBJECTS = 2500
- NAMESPACE = "LargeCharacterEntity"
- KIND = "LargeCharacter"
-
- @classmethod
- def setUpClass(cls):
- cls.CLIENT = clone_client(Config.CLIENT)
- # Remove the namespace from the cloned client, since these
- # query tests rely on the entities to be already stored
- # cls.CLIENT.namespace = cls.NAMESPACE
- cls.CLIENT.namespace = None
-
- # Populating the datastore if necessary.
- populate_datastore.add_large_character_entities(client=cls.CLIENT)
-
- @classmethod
- def tearDownClass(cls):
- # In the emulator, destroy the query entities.
- if os.getenv(DATASTORE_DATASET) is not None:
- # Use the client for this test instead of the global.
- clear_datastore.remove_all_entities(client=cls.CLIENT)
-
- def _base_query(self):
- # Use the client for this test instead of the global.
- return self.CLIENT.query(kind=self.KIND, namespace=self.NAMESPACE)
-
- def _verify(self, limit, offset, expected):
- # Query used for all tests
- page_query = self._base_query()
- page_query.add_filter("family", "=", "Stark")
- page_query.add_filter("alive", "=", False)
-
- iterator = page_query.fetch(limit=limit, offset=offset)
- entities = [e for e in iterator]
- self.assertEqual(len(entities), expected)
-
- def test_query_in_bounds_offsets(self):
- # Verify that with no offset there are the correct # of results
- self._verify(limit=None, offset=None, expected=self.TOTAL_OBJECTS)
-
- # Verify that with no limit there are results (offset provided)")
- self._verify(limit=None, offset=900, expected=self.TOTAL_OBJECTS - 900)
-
- # Offset beyond items larger Verify 200 items found")
- self._verify(limit=200, offset=1100, expected=200)
-
- def test_query_partially_out_of_bounds_offsets(self):
- # Offset within range, expect 50 despite larger limit")
- self._verify(limit=100, offset=self.TOTAL_OBJECTS - 50, expected=50)
-
- def test_query_out_of_bounds_offsets(self):
- # Offset beyond items larger Verify no items found")
- self._verify(limit=200, offset=self.TOTAL_OBJECTS + 1000, expected=0)
-
-
-class TestDatastoreTransaction(TestDatastore):
- def test_transaction_via_with_statement(self):
- entity = datastore.Entity(key=Config.CLIENT.key("Company", "Google"))
- entity["url"] = u"www.google.com"
-
- with Config.CLIENT.transaction() as xact:
- result = Config.CLIENT.get(entity.key)
- if result is None:
- xact.put(entity)
- self.case_entities_to_delete.append(entity)
-
- # This will always return after the transaction.
- retrieved_entity = Config.CLIENT.get(entity.key)
- self.case_entities_to_delete.append(retrieved_entity)
- self.assertEqual(retrieved_entity, entity)
-
- def test_transaction_via_explicit_begin_get_commit(self):
- # See
- # github.com/GoogleCloudPlatform/google-cloud-python/issues/1859
- # Note that this example lacks the threading which provokes the race
- # condition in that issue: we are basically just exercising the
- # "explict" path for using transactions.
- BEFORE_1 = 100
- BEFORE_2 = 0
- TRANSFER_AMOUNT = 40
- key1 = Config.CLIENT.key("account", "123")
- account1 = datastore.Entity(key=key1)
- account1["balance"] = BEFORE_1
- key2 = Config.CLIENT.key("account", "234")
- account2 = datastore.Entity(key=key2)
- account2["balance"] = BEFORE_2
- Config.CLIENT.put_multi([account1, account2])
- self.case_entities_to_delete.append(account1)
- self.case_entities_to_delete.append(account2)
-
- xact = Config.CLIENT.transaction()
- xact.begin()
- from_account = Config.CLIENT.get(key1, transaction=xact)
- to_account = Config.CLIENT.get(key2, transaction=xact)
- from_account["balance"] -= TRANSFER_AMOUNT
- to_account["balance"] += TRANSFER_AMOUNT
-
- xact.put(from_account)
- xact.put(to_account)
- xact.commit()
-
- after1 = Config.CLIENT.get(key1)
- after2 = Config.CLIENT.get(key2)
- self.assertEqual(after1["balance"], BEFORE_1 - TRANSFER_AMOUNT)
- self.assertEqual(after2["balance"], BEFORE_2 + TRANSFER_AMOUNT)
-
- def test_failure_with_contention(self):
- contention_prop_name = "baz"
- local_client = clone_client(Config.CLIENT)
-
- # Insert an entity which will be retrieved in a transaction
- # and updated outside it with a contentious value.
- key = local_client.key("BreakTxn", 1234)
- orig_entity = datastore.Entity(key=key)
- orig_entity["foo"] = u"bar"
- local_client.put(orig_entity)
- self.case_entities_to_delete.append(orig_entity)
-
- with self.assertRaises(Conflict):
- with local_client.transaction() as txn:
- entity_in_txn = local_client.get(key)
-
- # Update the original entity outside the transaction.
- orig_entity[contention_prop_name] = u"outside"
- Config.CLIENT.put(orig_entity)
-
- # Try to update the entity which we already updated outside the
- # transaction.
- entity_in_txn[contention_prop_name] = u"inside"
- txn.put(entity_in_txn)
-
- def test_empty_array_put(self):
- local_client = clone_client(Config.CLIENT)
-
- key = local_client.key("EmptyArray", 1234)
- local_client = datastore.Client()
- entity = datastore.Entity(key=key)
- entity["children"] = []
- local_client.put(entity)
- retrieved = local_client.get(entity.key)
-
- self.assertEqual(entity["children"], retrieved["children"])
diff --git a/tests/system/test_transaction.py b/tests/system/test_transaction.py
new file mode 100644
index 00000000..d27bc439
--- /dev/null
+++ b/tests/system/test_transaction.py
@@ -0,0 +1,106 @@
+# Copyright 2011 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pytest
+
+from google.cloud import datastore
+from google.cloud.exceptions import Conflict
+
+from . import _helpers
+
+
+def test_transaction_via_with_statement(datastore_client, entities_to_delete):
+ key = datastore_client.key("Company", "Google")
+ entity = datastore.Entity(key=key)
+ entity["url"] = u"www.google.com"
+
+ with datastore_client.transaction() as xact:
+ result = datastore_client.get(entity.key)
+ if result is None:
+ xact.put(entity)
+ entities_to_delete.append(entity)
+
+ # This will always return after the transaction.
+ retrieved_entity = datastore_client.get(key)
+
+ entities_to_delete.append(retrieved_entity)
+ assert retrieved_entity == entity
+
+
+def test_transaction_via_explicit_begin_get_commit(
+ datastore_client, entities_to_delete,
+):
+ # See
+ # github.com/GoogleCloudPlatform/google-cloud-python/issues/1859
+ # Note that this example lacks the threading which provokes the race
+ # condition in that issue: we are basically just exercising the
+    # "explicit" path for using transactions.
+ before_1 = 100
+ before_2 = 0
+ transfer_amount = 40
+
+ key1 = datastore_client.key("account", "123")
+ account1 = datastore.Entity(key=key1)
+ account1["balance"] = before_1
+
+ key2 = datastore_client.key("account", "234")
+ account2 = datastore.Entity(key=key2)
+ account2["balance"] = before_2
+
+ datastore_client.put_multi([account1, account2])
+ entities_to_delete.append(account1)
+ entities_to_delete.append(account2)
+
+ xact = datastore_client.transaction()
+ xact.begin()
+ from_account = datastore_client.get(key1, transaction=xact)
+ to_account = datastore_client.get(key2, transaction=xact)
+ from_account["balance"] -= transfer_amount
+ to_account["balance"] += transfer_amount
+
+ xact.put(from_account)
+ xact.put(to_account)
+ xact.commit()
+
+ after1 = datastore_client.get(key1)
+ after2 = datastore_client.get(key2)
+ assert after1["balance"] == before_1 - transfer_amount
+ assert after2["balance"] == before_2 + transfer_amount
+
+
+def test_failure_with_contention(datastore_client, entities_to_delete):
+ contention_prop_name = "baz"
+ local_client = _helpers.clone_client(datastore_client)
+
+ # Insert an entity which will be retrieved in a transaction
+ # and updated outside it with a contentious value.
+ key = local_client.key("BreakTxn", 1234)
+ orig_entity = datastore.Entity(key=key)
+ orig_entity["foo"] = u"bar"
+ local_client.put(orig_entity)
+
+ entities_to_delete.append(orig_entity)
+
+ with pytest.raises(Conflict):
+ with local_client.transaction() as txn:
+ entity_in_txn = local_client.get(key)
+
+ # Update the original entity outside the transaction.
+ orig_entity[contention_prop_name] = u"outside"
+ datastore_client.put(orig_entity)
+
+ # Try to update the entity which we already updated outside the
+ # transaction.
+ entity_in_txn[contention_prop_name] = u"inside"
+ txn.put(entity_in_txn)
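
The contention test above pins down that Datastore surfaces Conflict when a transaction races a concurrent write. Calling code typically reacts by retrying the whole transaction; a sketch of that pattern, where the helper name and retry count are illustrative:

from google.cloud.exceptions import Conflict


def transfer_with_retry(client, key1, key2, amount, attempts=3):
    # Re-run the read-modify-write transaction until it commits cleanly,
    # re-raising Conflict once the retry budget is exhausted.
    for attempt in range(attempts):
        try:
            with client.transaction() as xact:
                src = client.get(key1)
                dst = client.get(key2)
                src["balance"] -= amount
                dst["balance"] += amount
                xact.put(src)
                xact.put(dst)
            return
        except Conflict:
            if attempt == attempts - 1:
                raise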
diff --git a/tests/system/utils/populate_datastore.py b/tests/system/utils/populate_datastore.py
index 06b2895a..52f453f6 100644
--- a/tests/system/utils/populate_datastore.py
+++ b/tests/system/utils/populate_datastore.py
@@ -54,6 +54,9 @@
{"name": u"Bran", "family": u"Stark", "appearances": 25, "alive": True},
{"name": u"Jon Snow", "family": u"Stark", "appearances": 32, "alive": True},
)
+LARGE_CHARACTER_TOTAL_OBJECTS = 2500
+LARGE_CHARACTER_NAMESPACE = "LargeCharacterEntity"
+LARGE_CHARACTER_KIND = "LargeCharacter"
def print_func(message):
@@ -62,15 +65,14 @@ def print_func(message):
def add_large_character_entities(client=None):
- TOTAL_OBJECTS = 2500
- NAMESPACE = "LargeCharacterEntity"
- KIND = "LargeCharacter"
MAX_STRING = (string.ascii_lowercase * 58)[:1500]
- client.namespace = NAMESPACE
+ client.namespace = LARGE_CHARACTER_NAMESPACE
# Query used for all tests
- page_query = client.query(kind=KIND, namespace=NAMESPACE)
+ page_query = client.query(
+ kind=LARGE_CHARACTER_KIND, namespace=LARGE_CHARACTER_NAMESPACE
+ )
def put_objects(count):
current = 0
@@ -86,7 +88,7 @@ def put_objects(count):
for i in range(start, end):
name = "character{0:05d}".format(i)
# The Cloud Datastore key for the new entity
- task_key = client.key(KIND, name)
+ task_key = client.key(LARGE_CHARACTER_KIND, name)
# Prepares the new entity
task = datastore.Entity(key=task_key)
@@ -102,16 +104,16 @@ def put_objects(count):
current += ENTITIES_TO_BATCH
# Ensure we have 1500 entities for tests. If not, clean up type and add
- # new entities equal to TOTAL_OBJECTS
+ # new entities equal to LARGE_CHARACTER_TOTAL_OBJECTS
all_entities = [e for e in page_query.fetch()]
- if len(all_entities) != TOTAL_OBJECTS:
+ if len(all_entities) != LARGE_CHARACTER_TOTAL_OBJECTS:
# Cleanup Collection if not an exact match
while all_entities:
entities = all_entities[:500]
all_entities = all_entities[500:]
client.delete_multi([e.key for e in entities])
# Put objects
- put_objects(TOTAL_OBJECTS)
+ put_objects(LARGE_CHARACTER_TOTAL_OBJECTS)
def add_characters(client=None):
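
put_objects, earlier in this file, writes the 2500 large-character entities in batches, and the cleanup path above deletes them 500 at a time; both respect Datastore's cap of 500 mutations per commit. A generic chunked-write helper in the same spirit (illustrative, not part of the module):

def put_in_chunks(client, entities, chunk_size=500):
    # Issue put_multi calls of at most chunk_size entities so that no
    # single commit exceeds Datastore's 500-mutation limit.
    for start in range(0, len(entities), chunk_size):
        client.put_multi(entities[start : start + chunk_size])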