From 0b9e7937c16f17961e4d9f44ddabf5a03fef3047 Mon Sep 17 00:00:00 2001 From: Tim Graham Date: Wed, 7 May 2025 10:49:32 -0400 Subject: [PATCH 01/21] Fix typos in docs/source/topics/known-issues.rst --- docs/source/topics/known-issues.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/source/topics/known-issues.rst b/docs/source/topics/known-issues.rst index 2bf668075..5f5ed4a45 100644 --- a/docs/source/topics/known-issues.rst +++ b/docs/source/topics/known-issues.rst @@ -98,8 +98,8 @@ Caching :doc:`Database caching ` uses this library's :djadmin:`createcachecollection` command rather Django's SQL-specific -:djadmin:`createcachetable`. +:djadmin:`createcachetable` command. Secondly, you must use the :class:`django_mongodb_backend.cache.MongoDBCache` backend rather than Django's built-in database cache backend, -``django.core.cache.backends.db.DatabaseCache``). +``django.core.cache.backends.db.DatabaseCache``. From 4de75eb36b53e568d5a15dec663020bdc2ec3028 Mon Sep 17 00:00:00 2001 From: Tim Graham Date: Wed, 7 May 2025 10:54:56 -0400 Subject: [PATCH 02/21] Add "Known issues and limitations" link to the index --- docs/source/index.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/source/index.rst b/docs/source/index.rst index 3e16b83e7..bdd981625 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -14,6 +14,7 @@ First steps - :doc:`Installation ` - :doc:`Configuring a project ` - :doc:`howto/contrib-apps` +- :doc:`topics/known-issues` Getting help ============ From f11a3324db8fdc9f7814a684d84eb8a9fa8fe92b Mon Sep 17 00:00:00 2001 From: Tim Graham Date: Sun, 20 Apr 2025 22:00:47 -0400 Subject: [PATCH 03/21] INTPYTHON-599 Make a field's custom lookups available in embedded model queries --- .../fields/embedded_model.py | 7 +++ docs/source/releases/5.1.x.rst | 10 +++++ tests/model_fields_/models.py | 2 + tests/model_fields_/test_embedded_model.py | 43 +++++++++++++++++++ 4 files changed, 62 insertions(+) diff --git a/django_mongodb_backend/fields/embedded_model.py b/django_mongodb_backend/fields/embedded_model.py index d9dd5b6cf..543c2db06 100644 --- a/django_mongodb_backend/fields/embedded_model.py +++ b/django_mongodb_backend/fields/embedded_model.py @@ -155,6 +155,9 @@ def __init__(self, key_name, ref_field, *args, **kwargs): self.key_name = str(key_name) self.ref_field = ref_field + def get_lookup(self, name): + return self.ref_field.get_lookup(name) + def get_transform(self, name): """ Validate that `name` is either a field of an embedded model or a @@ -204,6 +207,10 @@ def as_mql(self, compiler, connection): result = build_json_mql_path(result, json_key_transforms) return result + @property + def output_field(self): + return self.ref_field + class KeyTransformFactory: def __init__(self, key_name, ref_field): diff --git a/docs/source/releases/5.1.x.rst b/docs/source/releases/5.1.x.rst index 30ac695df..58b153bb9 100644 --- a/docs/source/releases/5.1.x.rst +++ b/docs/source/releases/5.1.x.rst @@ -2,6 +2,16 @@ Django MongoDB Backend 5.1.x ============================ +5.1.0 beta 3 +============ + +*Unreleased* + +- Added support for a field's custom lookups in ``EmbeddedModelField``, e.g. + ``ArrayField``’s ``contains``, ``contained__by``, etc. + +.. 
_django-mongodb-backend-5.1.0-beta-2: + 5.1.0 beta 2 ============ diff --git a/tests/model_fields_/models.py b/tests/model_fields_/models.py index b25b94a1c..ad573323b 100644 --- a/tests/model_fields_/models.py +++ b/tests/model_fields_/models.py @@ -110,12 +110,14 @@ class Address(EmbeddedModel): city = models.CharField(max_length=20) state = models.CharField(max_length=2) zip_code = models.IntegerField(db_index=True) + tags = ArrayField(models.CharField(max_length=100), null=True, blank=True) class Author(EmbeddedModel): name = models.CharField(max_length=10) age = models.IntegerField() address = EmbeddedModelField(Address) + skills = ArrayField(models.CharField(max_length=100), null=True, blank=True) class Book(models.Model): diff --git a/tests/model_fields_/test_embedded_model.py b/tests/model_fields_/test_embedded_model.py index eee0dd1a9..6466857bd 100644 --- a/tests/model_fields_/test_embedded_model.py +++ b/tests/model_fields_/test_embedded_model.py @@ -186,6 +186,49 @@ def test_nested(self): self.assertCountEqual(Book.objects.filter(author__address__city="NYC"), [obj]) +class ArrayFieldTests(TestCase): + @classmethod + def setUpTestData(cls): + cls.book = Book.objects.create( + author=Author( + name="Shakespeare", + age=55, + skills=["writing", "editing"], + address=Address(city="NYC", state="NY", tags=["home", "shipping"]), + ), + ) + + def test_contains(self): + self.assertCountEqual(Book.objects.filter(author__skills__contains=["nonexistent"]), []) + self.assertCountEqual( + Book.objects.filter(author__skills__contains=["writing"]), [self.book] + ) + # Nested + self.assertCountEqual( + Book.objects.filter(author__address__tags__contains=["nonexistent"]), [] + ) + self.assertCountEqual( + Book.objects.filter(author__address__tags__contains=["home"]), [self.book] + ) + + def test_contained_by(self): + self.assertCountEqual( + Book.objects.filter(author__skills__contained_by=["writing", "publishing"]), [] + ) + self.assertCountEqual( + Book.objects.filter(author__skills__contained_by=["writing", "editing", "publishing"]), + [self.book], + ) + # Nested + self.assertCountEqual( + Book.objects.filter(author__address__tags__contained_by=["home", "work"]), [] + ) + self.assertCountEqual( + Book.objects.filter(author__address__tags__contained_by=["home", "work", "shipping"]), + [self.book], + ) + + class InvalidLookupTests(SimpleTestCase): def test_invalid_field(self): msg = "Author has no field named 'first_name'" From 6437d3f812f4e0cb629972da179eb1b7035033ae Mon Sep 17 00:00:00 2001 From: Tim Graham Date: Sat, 26 Apr 2025 19:50:44 -0400 Subject: [PATCH 04/21] INTPYTHON-599 Make a field's custom transforms available in embedded model queries --- .../fields/embedded_model.py | 53 ++++++------------- docs/source/releases/5.1.x.rst | 5 +- tests/model_fields_/test_embedded_model.py | 7 +++ 3 files changed, 26 insertions(+), 39 deletions(-) diff --git a/django_mongodb_backend/fields/embedded_model.py b/django_mongodb_backend/fields/embedded_model.py index 543c2db06..cf45eac8a 100644 --- a/django_mongodb_backend/fields/embedded_model.py +++ b/django_mongodb_backend/fields/embedded_model.py @@ -7,7 +7,6 @@ from django.db.models.lookups import Transform from .. import forms -from .json import build_json_mql_path class EmbeddedModelField(models.Field): @@ -163,49 +162,29 @@ def get_transform(self, name): Validate that `name` is either a field of an embedded model or a lookup on an embedded model's field. 
""" - result = None - if isinstance(self.ref_field, EmbeddedModelField): - opts = self.ref_field.embedded_model._meta - new_field = opts.get_field(name) - result = KeyTransformFactory(name, new_field) + if transform := self.ref_field.get_transform(name): + return transform + suggested_lookups = difflib.get_close_matches(name, self.ref_field.get_lookups()) + if suggested_lookups: + suggested_lookups = " or ".join(suggested_lookups) + suggestion = f", perhaps you meant {suggested_lookups}?" else: - if self.ref_field.get_transform(name) is None: - suggested_lookups = difflib.get_close_matches(name, self.ref_field.get_lookups()) - if suggested_lookups: - suggested_lookups = " or ".join(suggested_lookups) - suggestion = f", perhaps you meant {suggested_lookups}?" - else: - suggestion = "." - raise FieldDoesNotExist( - f"Unsupported lookup '{name}' for " - f"{self.ref_field.__class__.__name__} '{self.ref_field.name}'" - f"{suggestion}" - ) - result = KeyTransformFactory(name, self.ref_field) - return result + suggestion = "." + raise FieldDoesNotExist( + f"Unsupported lookup '{name}' for " + f"{self.ref_field.__class__.__name__} '{self.ref_field.name}'" + f"{suggestion}" + ) - def preprocess_lhs(self, compiler, connection): + def as_mql(self, compiler, connection): previous = self - embedded_key_transforms = [] - json_key_transforms = [] + key_transforms = [] while isinstance(previous, KeyTransform): - if isinstance(previous.ref_field, EmbeddedModelField): - embedded_key_transforms.insert(0, previous.key_name) - else: - json_key_transforms.insert(0, previous.key_name) + key_transforms.insert(0, previous.key_name) previous = previous.lhs mql = previous.as_mql(compiler, connection) - # The first json_key_transform is the field name. - embedded_key_transforms.append(json_key_transforms.pop(0)) - return mql, embedded_key_transforms, json_key_transforms - - def as_mql(self, compiler, connection): - mql, key_transforms, json_key_transforms = self.preprocess_lhs(compiler, connection) transforms = ".".join(key_transforms) - result = f"{mql}.{transforms}" - if json_key_transforms: - result = build_json_mql_path(result, json_key_transforms) - return result + return f"{mql}.{transforms}" @property def output_field(self): diff --git a/docs/source/releases/5.1.x.rst b/docs/source/releases/5.1.x.rst index 58b153bb9..dde2e03a2 100644 --- a/docs/source/releases/5.1.x.rst +++ b/docs/source/releases/5.1.x.rst @@ -7,8 +7,9 @@ Django MongoDB Backend 5.1.x *Unreleased* -- Added support for a field's custom lookups in ``EmbeddedModelField``, e.g. - ``ArrayField``’s ``contains``, ``contained__by``, etc. +- Added support for a field's custom lookups and transforms in + ``EmbeddedModelField``, e.g. ``ArrayField``’s ``contains``, + ``contained__by``, ``len``, etc. .. 
_django-mongodb-backend-5.1.0-beta-2: diff --git a/tests/model_fields_/test_embedded_model.py b/tests/model_fields_/test_embedded_model.py index 6466857bd..700a3cf1c 100644 --- a/tests/model_fields_/test_embedded_model.py +++ b/tests/model_fields_/test_embedded_model.py @@ -228,6 +228,13 @@ def test_contained_by(self): [self.book], ) + def test_len(self): + self.assertCountEqual(Book.objects.filter(author__skills__len=1), []) + self.assertCountEqual(Book.objects.filter(author__skills__len=2), [self.book]) + # Nested + self.assertCountEqual(Book.objects.filter(author__address__tags__len=1), []) + self.assertCountEqual(Book.objects.filter(author__address__tags__len=2), [self.book]) + class InvalidLookupTests(SimpleTestCase): def test_invalid_field(self): From 87fa60c22c88646dfc8daa2c62281396deb983be Mon Sep 17 00:00:00 2001 From: Tim Graham Date: Thu, 1 May 2025 22:08:34 -0400 Subject: [PATCH 05/21] Fix Trunc database function with tzinfo parameter --- django_mongodb_backend/features.py | 4 ---- django_mongodb_backend/functions.py | 32 ++++++++++++++++++++++++++++ django_mongodb_backend/operations.py | 14 +++++++++--- docs/source/releases/5.1.x.rst | 2 ++ docs/source/topics/known-issues.rst | 4 ---- 5 files changed, 45 insertions(+), 11 deletions(-) diff --git a/django_mongodb_backend/features.py b/django_mongodb_backend/features.py index a286a2cbf..793590901 100644 --- a/django_mongodb_backend/features.py +++ b/django_mongodb_backend/features.py @@ -58,10 +58,6 @@ class DatabaseFeatures(BaseDatabaseFeatures): "model_fields.test_jsonfield.TestQuerying.test_icontains", # MongoDB gives ROUND(365, -1)=360 instead of 370 like other databases. "db_functions.math.test_round.RoundTests.test_integer_with_negative_precision", - # Truncating in another timezone doesn't work becauase MongoDB converts - # the result back to UTC. - "db_functions.datetime.test_extract_trunc.DateFunctionWithTimeZoneTests.test_trunc_func_with_timezone", - "db_functions.datetime.test_extract_trunc.DateFunctionWithTimeZoneTests.test_trunc_timezone_applied_before_truncation", # Unexpected alias_refcount in alias_map. "queries.tests.Queries1Tests.test_order_by_tables", # The $sum aggregation returns 0 instead of None for null. diff --git a/django_mongodb_backend/functions.py b/django_mongodb_backend/functions.py index 464dddd6f..f8546f963 100644 --- a/django_mongodb_backend/functions.py +++ b/django_mongodb_backend/functions.py @@ -1,4 +1,8 @@ +from datetime import datetime + +from django.conf import settings from django.db import NotSupportedError +from django.db.models import DateField, DateTimeField, TimeField from django.db.models.expressions import Func from django.db.models.functions.comparison import Cast, Coalesce, Greatest, Least, NullIf from django.db.models.functions.datetime import ( @@ -195,6 +199,33 @@ def trunc(self, compiler, connection): return {"$dateTrunc": lhs_mql} +def trunc_convert_value(self, value, expression, connection): + if connection.vendor == "mongodb": + # A custom TruncBase.convert_value() for MongoDB. + if value is None: + return None + convert_to_tz = settings.USE_TZ and self.get_tzname() != "UTC" + if isinstance(self.output_field, DateTimeField): + if convert_to_tz: + # Unlike other databases, MongoDB returns the value in UTC, + # so rather than setting the time zone equal to self.tzinfo, + # the value must be converted to tzinfo. 
+ value = value.astimezone(self.tzinfo) + elif isinstance(value, datetime): + if isinstance(self.output_field, DateField): + if convert_to_tz: + value = value.astimezone(self.tzinfo) + # Truncate for Trunc(..., output_field=DateField) + value = value.date() + elif isinstance(self.output_field, TimeField): + if convert_to_tz: + value = value.astimezone(self.tzinfo) + # Truncate for Trunc(..., output_field=TimeField) + value = value.time() + return value + return self.convert_value(value, expression, connection) + + def trunc_date(self, compiler, connection): # Cast to date rather than truncate to date. lhs_mql = process_lhs(self, compiler, connection) @@ -254,6 +285,7 @@ def register_functions(): Substr.as_mql = substr Trim.as_mql = trim("trim") TruncBase.as_mql = trunc + TruncBase.convert_value = trunc_convert_value TruncDate.as_mql = trunc_date TruncTime.as_mql = trunc_time Upper.as_mql = preserve_null("toUpper") diff --git a/django_mongodb_backend/operations.py b/django_mongodb_backend/operations.py index cb1e93db0..9138f06e4 100644 --- a/django_mongodb_backend/operations.py +++ b/django_mongodb_backend/operations.py @@ -10,7 +10,7 @@ from django.db.backends.base.operations import BaseDatabaseOperations from django.db.models import TextField from django.db.models.expressions import Combinable, Expression -from django.db.models.functions import Cast +from django.db.models.functions import Cast, Trunc from django.utils import timezone from django.utils.regex_helper import _lazy_re_compile @@ -97,7 +97,11 @@ def get_db_converters(self, expression): ] ) elif internal_type == "DateField": - converters.append(self.convert_datefield_value) + # Trunc(... output_field="DateField") values must remain datetime + # until Trunc.convert_value() so they can be converted from UTC + # before truncation. + if not isinstance(expression, Trunc): + converters.append(self.convert_datefield_value) elif internal_type == "DateTimeField": if settings.USE_TZ: converters.append(self.convert_datetimefield_value) @@ -106,7 +110,11 @@ def get_db_converters(self, expression): elif internal_type == "JSONField": converters.append(self.convert_jsonfield_value) elif internal_type == "TimeField": - converters.append(self.convert_timefield_value) + # Trunc(... output_field="TimeField") values must remain datetime + # until Trunc.convert_value() so they can be converted from UTC + # before truncation. + if not isinstance(expression, Trunc): + converters.append(self.convert_timefield_value) elif internal_type == "UUIDField": converters.append(self.convert_uuidfield_value) return converters diff --git a/docs/source/releases/5.1.x.rst b/docs/source/releases/5.1.x.rst index dde2e03a2..ce3a5877b 100644 --- a/docs/source/releases/5.1.x.rst +++ b/docs/source/releases/5.1.x.rst @@ -10,6 +10,8 @@ Django MongoDB Backend 5.1.x - Added support for a field's custom lookups and transforms in ``EmbeddedModelField``, e.g. ``ArrayField``’s ``contains``, ``contained__by``, ``len``, etc. +- Fixed the results of queries that use the ``tzinfo`` parameter of the + ``Trunc`` database functions. .. 
_django-mongodb-backend-5.1.0-beta-2: diff --git a/docs/source/topics/known-issues.rst b/docs/source/topics/known-issues.rst index 5f5ed4a45..4848d3bad 100644 --- a/docs/source/topics/known-issues.rst +++ b/docs/source/topics/known-issues.rst @@ -74,10 +74,6 @@ Database functions :class:`~django.db.models.functions.SHA512` - :class:`~django.db.models.functions.Sign` -- The ``tzinfo`` parameter of the :class:`~django.db.models.functions.Trunc` - database functions doesn't work properly because MongoDB converts the result - back to UTC. - Transaction management ====================== From ac281218e0555512a32bb476e3bffa888b81c34f Mon Sep 17 00:00:00 2001 From: Tim Graham Date: Fri, 2 May 2025 20:32:41 -0400 Subject: [PATCH 06/21] Document the tzinfo parameter of TruncDate/TruncTime as unsupported Add the same exception raising from TruncDate to TruncTime and add tests for both functions. --- django_mongodb_backend/functions.py | 3 +++ docs/source/topics/known-issues.rst | 5 +++++ tests/db_functions_/models.py | 5 +++++ tests/db_functions_/test_datetime.py | 26 ++++++++++++++++++++++++++ 4 files changed, 39 insertions(+) create mode 100644 tests/db_functions_/models.py create mode 100644 tests/db_functions_/test_datetime.py diff --git a/django_mongodb_backend/functions.py b/django_mongodb_backend/functions.py index f8546f963..9405e8711 100644 --- a/django_mongodb_backend/functions.py +++ b/django_mongodb_backend/functions.py @@ -248,6 +248,9 @@ def trunc_date(self, compiler, connection): def trunc_time(self, compiler, connection): + tzname = self.get_tzname() + if tzname and tzname != "UTC": + raise NotSupportedError(f"TruncTime with tzinfo ({tzname}) isn't supported on MongoDB.") lhs_mql = process_lhs(self, compiler, connection) return { "$dateFromString": { diff --git a/docs/source/topics/known-issues.rst b/docs/source/topics/known-issues.rst index 4848d3bad..e8c0a5534 100644 --- a/docs/source/topics/known-issues.rst +++ b/docs/source/topics/known-issues.rst @@ -74,6 +74,11 @@ Database functions :class:`~django.db.models.functions.SHA512` - :class:`~django.db.models.functions.Sign` +- The ``tzinfo`` parameter of the + :class:`~django.db.models.functions.TruncDate` and + :class:`~django.db.models.functions.TruncTime` database functions isn't + supported. + Transaction management ====================== diff --git a/tests/db_functions_/models.py b/tests/db_functions_/models.py new file mode 100644 index 000000000..17b9ad1a7 --- /dev/null +++ b/tests/db_functions_/models.py @@ -0,0 +1,5 @@ +from django.db import models + + +class DTModel(models.Model): + start_datetime = models.DateTimeField(null=True, blank=True) diff --git a/tests/db_functions_/test_datetime.py b/tests/db_functions_/test_datetime.py new file mode 100644 index 000000000..e0df28801 --- /dev/null +++ b/tests/db_functions_/test_datetime.py @@ -0,0 +1,26 @@ +from zoneinfo import ZoneInfo + +from django.db import NotSupportedError +from django.db.models.functions import TruncDate, TruncTime +from django.test import TestCase, override_settings + +from .models import DTModel + + +@override_settings(USE_TZ=True) +class TruncTests(TestCase): + melb = ZoneInfo("Australia/Melbourne") + + def test_truncdate_tzinfo(self): + msg = "TruncDate with tzinfo (Australia/Melbourne) isn't supported on MongoDB." 
+ with self.assertRaisesMessage(NotSupportedError, msg): + DTModel.objects.annotate( + melb_date=TruncDate("start_datetime", tzinfo=self.melb), + ).get() + + def test_trunctime_tzinfo(self): + msg = "TruncTime with tzinfo (Australia/Melbourne) isn't supported on MongoDB." + with self.assertRaisesMessage(NotSupportedError, msg): + DTModel.objects.annotate( + melb_date=TruncTime("start_datetime", tzinfo=self.melb), + ).get() From 41cc35f39eeda34a8f522e56741b8c6f2986f10f Mon Sep 17 00:00:00 2001 From: Tim Graham Date: Thu, 10 Apr 2025 21:23:05 -0400 Subject: [PATCH 07/21] Add support for QuerySet.dates() and datetimes() This was blocked on support for QuerySet.distinct() which was added in e04056ed9ed5000c2427ecdef0db50a1649fe3a6. --- django_mongodb_backend/compiler.py | 9 ---- django_mongodb_backend/features.py | 76 +--------------------------- docs/source/ref/models/querysets.rst | 2 - docs/source/releases/5.1.x.rst | 1 + docs/source/topics/known-issues.rst | 2 - 5 files changed, 2 insertions(+), 88 deletions(-) diff --git a/django_mongodb_backend/compiler.py b/django_mongodb_backend/compiler.py index cf666619a..1b4882eb4 100644 --- a/django_mongodb_backend/compiler.py +++ b/django_mongodb_backend/compiler.py @@ -332,15 +332,6 @@ def cursor_iter(self, cursor, chunk_size, columns): def check_query(self): """Check if the current query is supported by the database.""" - if self.query.distinct: - # This is a heuristic to detect QuerySet.datetimes() and dates(). - # "datetimefield" and "datefield" are the names of the annotations - # the methods use. A user could annotate with the same names which - # would give an incorrect error message. - if "datetimefield" in self.query.annotations: - raise NotSupportedError("QuerySet.datetimes() is not supported on MongoDB.") - if "datefield" in self.query.annotations: - raise NotSupportedError("QuerySet.dates() is not supported on MongoDB.") if self.query.extra: if any(key.startswith("_prefetch_related_") for key in self.query.extra): raise NotSupportedError("QuerySet.prefetch_related() is not supported on MongoDB.") diff --git a/django_mongodb_backend/features.py b/django_mongodb_backend/features.py index 793590901..e6fc07775 100644 --- a/django_mongodb_backend/features.py +++ b/django_mongodb_backend/features.py @@ -268,81 +268,6 @@ def django_test_expected_failures(self): "update.tests.AdvancedTests.test_update_ordered_by_m2m_annotation", "update.tests.AdvancedTests.test_update_ordered_by_m2m_annotation_desc", }, - "QuerySet.dates() is not supported on MongoDB.": { - "admin_changelist.tests.ChangeListTests.test_computed_list_display_localization", - "admin_changelist.tests.ChangeListTests.test_object_tools_displayed_no_add_permission", - "admin_views.tests.AdminViewBasicTest.test_change_list_sorting_override_model_admin", - "admin_views.tests.AdminViewBasicTest.test_multiple_sort_same_field", - "admin_views.tests.AdminViewListEditable.test_inheritance", - "admin_views.tests.CSSTest.test_changelist_field_classes", - "admin_views.tests.DateHierarchyTests", - "aggregation.tests.AggregateTestCase.test_dates_with_aggregation", - "annotations.tests.AliasTests.test_dates_alias", - "aggregation_regress.tests.AggregationTests.test_more_more_more2", - "backends.tests.DateQuotingTest.test_django_date_trunc", - "dates.tests.DatesTests.test_dates_trunc_datetime_fields", - "dates.tests.DatesTests.test_related_model_traverse", - "generic_views.test_dates.ArchiveIndexViewTests.test_allow_empty_archive_view", - 
"generic_views.test_dates.ArchiveIndexViewTests.test_archive_view", - "generic_views.test_dates.ArchiveIndexViewTests.test_archive_view_by_month", - "generic_views.test_dates.ArchiveIndexViewTests.test_archive_view_context_object_name", - "generic_views.test_dates.ArchiveIndexViewTests.test_archive_view_custom_sorting", - "generic_views.test_dates.ArchiveIndexViewTests.test_archive_view_custom_sorting_dec", - "generic_views.test_dates.ArchiveIndexViewTests.test_archive_view_template", - "generic_views.test_dates.ArchiveIndexViewTests.test_archive_view_template_suffix", - "generic_views.test_dates.ArchiveIndexViewTests.test_date_list_order", - "generic_views.test_dates.ArchiveIndexViewTests.test_no_duplicate_query", - "generic_views.test_dates.ArchiveIndexViewTests.test_paginated_archive_view", - "generic_views.test_dates.ArchiveIndexViewTests.test_paginated_archive_view_does_not_load_entire_table", - "generic_views.test_dates.MonthArchiveViewTests.test_custom_month_format", - "generic_views.test_dates.MonthArchiveViewTests.test_date_list_order", - "generic_views.test_dates.MonthArchiveViewTests.test_month_view", - "generic_views.test_dates.MonthArchiveViewTests.test_month_view_allow_empty", - "generic_views.test_dates.MonthArchiveViewTests.test_month_view_allow_future", - "generic_views.test_dates.MonthArchiveViewTests.test_month_view_get_month_from_request", - "generic_views.test_dates.MonthArchiveViewTests.test_month_view_paginated", - "generic_views.test_dates.MonthArchiveViewTests.test_previous_month_without_content", - "generic_views.test_dates.YearArchiveViewTests.test_date_list_order", - "generic_views.test_dates.YearArchiveViewTests.test_get_context_data_receives_extra_context", - "generic_views.test_dates.YearArchiveViewTests.test_no_duplicate_query", - "generic_views.test_dates.YearArchiveViewTests.test_year_view", - "generic_views.test_dates.YearArchiveViewTests.test_year_view_allow_future", - "generic_views.test_dates.YearArchiveViewTests.test_year_view_custom_sort_order", - "generic_views.test_dates.YearArchiveViewTests.test_year_view_empty", - "generic_views.test_dates.YearArchiveViewTests.test_year_view_make_object_list", - "generic_views.test_dates.YearArchiveViewTests.test_year_view_paginated", - "generic_views.test_dates.YearArchiveViewTests.test_year_view_two_custom_sort_orders", - "many_to_one.tests.ManyToOneTests.test_select_related", - "model_regress.tests.ModelTests.test_date_filter_null", - "reserved_names.tests.ReservedNameTests.test_dates", - "queryset_pickle.tests.PickleabilityTestCase.test_specialized_queryset", - }, - "QuerySet.datetimes() is not supported on MongoDB.": { - "admin_views.test_templatetags.DateHierarchyTests", - "admin_views.test_templatetags.AdminTemplateTagsTest.test_override_change_list_template_tags", - "admin_views.tests.AdminViewBasicTest.test_date_hierarchy_empty_queryset", - "admin_views.tests.AdminViewBasicTest.test_date_hierarchy_local_date_differ_from_utc", - "admin_views.tests.AdminViewBasicTest.test_date_hierarchy_timezone_dst", - "annotations.tests.AliasTests.test_datetimes_alias", - "datetimes.tests.DateTimesTests.test_21432", - "datetimes.tests.DateTimesTests.test_datetimes_has_lazy_iterator", - "datetimes.tests.DateTimesTests.test_datetimes_returns_available_dates_for_given_scope_and_given_field", - "datetimes.tests.DateTimesTests.test_related_model_traverse", - "generic_views.test_dates.ArchiveIndexViewTests.test_aware_datetime_archive_view", - "generic_views.test_dates.ArchiveIndexViewTests.test_datetime_archive_view", - 
"generic_views.test_dates.MonthArchiveViewTests.test_aware_datetime_month_view", - "generic_views.test_dates.MonthArchiveViewTests.test_datetime_month_view", - "generic_views.test_dates.YearArchiveViewTests.test_aware_datetime_year_view", - "generic_views.test_dates.YearArchiveViewTests.test_datetime_year_view", - "model_inheritance_regress.tests.ModelInheritanceTest.test_issue_7105", - "queries.tests.Queries1Tests.test_ticket7155", - "queries.tests.Queries1Tests.test_ticket7791", - "queries.tests.Queries1Tests.test_tickets_6180_6203", - "queries.tests.Queries1Tests.test_tickets_7087_12242", - "timezones.tests.LegacyDatabaseTests.test_query_datetimes", - "timezones.tests.NewDatabaseTests.test_query_datetimes", - "timezones.tests.NewDatabaseTests.test_query_datetimes_in_other_timezone", - }, "QuerySet.extra() is not supported.": { "aggregation.tests.AggregateTestCase.test_exists_extra_where_with_aggregate", "annotations.tests.NonAggregateAnnotationTestCase.test_column_field_ordering", @@ -362,6 +287,7 @@ def django_test_expected_failures(self): "queries.test_qs_combinators.QuerySetSetOperationTests.test_union_with_extra_and_values_list", "queries.tests.EscapingTests.test_ticket_7302", "queries.tests.Queries1Tests.test_tickets_1878_2939", + "queries.tests.Queries1Tests.test_tickets_7087_12242", "queries.tests.Queries5Tests.test_extra_select_literal_percent_s", "queries.tests.Queries5Tests.test_ticket7256", "queries.tests.ValuesQuerysetTests.test_extra_multiple_select_params_values_order_by", diff --git a/docs/source/ref/models/querysets.rst b/docs/source/ref/models/querysets.rst index a0e32957c..b1cfbf8b7 100644 --- a/docs/source/ref/models/querysets.rst +++ b/docs/source/ref/models/querysets.rst @@ -9,8 +9,6 @@ All of Django's :doc:`QuerySet methods ` are supported, except: - :meth:`bulk_update() ` - - :meth:`dates() ` - - :meth:`datetimes() ` - :meth:`extra() ` - :meth:`prefetch_related() ` diff --git a/docs/source/releases/5.1.x.rst b/docs/source/releases/5.1.x.rst index ce3a5877b..3e164fa74 100644 --- a/docs/source/releases/5.1.x.rst +++ b/docs/source/releases/5.1.x.rst @@ -12,6 +12,7 @@ Django MongoDB Backend 5.1.x ``contained__by``, ``len``, etc. - Fixed the results of queries that use the ``tzinfo`` parameter of the ``Trunc`` database functions. +- Added support for ``QuerySet.dates()`` and ``datetimes()``. .. 
_django-mongodb-backend-5.1.0-beta-2: diff --git a/docs/source/topics/known-issues.rst b/docs/source/topics/known-issues.rst index e8c0a5534..0ec4fc1b8 100644 --- a/docs/source/topics/known-issues.rst +++ b/docs/source/topics/known-issues.rst @@ -31,8 +31,6 @@ Querying - The following ``QuerySet`` methods aren't supported: - :meth:`bulk_update() ` - - :meth:`dates() ` - - :meth:`datetimes() ` - :meth:`extra() ` - :meth:`prefetch_related() ` From 34abcc47164f18ea97dcc8190f73e26e3f3d0656 Mon Sep 17 00:00:00 2001 From: Tim Graham Date: Fri, 25 Apr 2025 21:46:36 -0400 Subject: [PATCH 08/21] INTPYTHON-602 Fix QuerySet results of embedded model fields that use database converters --- django_mongodb_backend/operations.py | 12 +++++++++++ docs/source/releases/5.1.x.rst | 3 +++ tests/model_fields_/models.py | 6 ++++++ tests/model_fields_/test_embedded_model.py | 25 +++++++++++++++++++++- 4 files changed, 45 insertions(+), 1 deletion(-) diff --git a/django_mongodb_backend/operations.py b/django_mongodb_backend/operations.py index 9138f06e4..bdf41bcf9 100644 --- a/django_mongodb_backend/operations.py +++ b/django_mongodb_backend/operations.py @@ -107,6 +107,8 @@ def get_db_converters(self, expression): converters.append(self.convert_datetimefield_value) elif internal_type == "DecimalField": converters.append(self.convert_decimalfield_value) + elif internal_type == "EmbeddedModelField": + converters.append(self.convert_embeddedmodelfield_value) elif internal_type == "JSONField": converters.append(self.convert_jsonfield_value) elif internal_type == "TimeField": @@ -150,6 +152,16 @@ def convert_durationfield_value(self, value, expression, connection): value = datetime.timedelta(milliseconds=int(str(value))) return value + def convert_embeddedmodelfield_value(self, value, expression, connection): + if value is not None: + # Apply database converters to each field of the embedded model. + for field in expression.output_field.embedded_model._meta.fields: + field_expr = Expression(output_field=field) + converters = connection.ops.get_db_converters(field_expr) + for converter in converters: + value[field.attname] = converter(value[field.attname], field_expr, connection) + return value + def convert_jsonfield_value(self, value, expression, connection): """ Convert dict data to a string so that JSONField.from_db_value() can diff --git a/docs/source/releases/5.1.x.rst b/docs/source/releases/5.1.x.rst index 3e164fa74..fac35eb16 100644 --- a/docs/source/releases/5.1.x.rst +++ b/docs/source/releases/5.1.x.rst @@ -13,6 +13,9 @@ Django MongoDB Backend 5.1.x - Fixed the results of queries that use the ``tzinfo`` parameter of the ``Trunc`` database functions. - Added support for ``QuerySet.dates()`` and ``datetimes()``. +- Fixed loading of ``QuerySet`` results for embedded models that have fields + that use database converters. For example, a crash for ``DecimalField``: + ``ValidationError: ['“1” value must be a decimal number.']``). .. 
_django-mongodb-backend-5.1.0-beta-2: diff --git a/tests/model_fields_/models.py b/tests/model_fields_/models.py index ad573323b..2470f4bb8 100644 --- a/tests/model_fields_/models.py +++ b/tests/model_fields_/models.py @@ -104,6 +104,12 @@ class Data(EmbeddedModel): auto_now = models.DateTimeField(auto_now=True) auto_now_add = models.DateTimeField(auto_now_add=True) json_value = models.JSONField() + decimal = models.DecimalField(max_digits=9, decimal_places="2", null=True, blank=True) + nested_data = EmbeddedModelField("NestedData", null=True, blank=True) + + +class NestedData(EmbeddedModel): + decimal = models.DecimalField(max_digits=9, decimal_places="2", null=True, blank=True) class Address(EmbeddedModel): diff --git a/tests/model_fields_/test_embedded_model.py b/tests/model_fields_/test_embedded_model.py index 700a3cf1c..004eae00d 100644 --- a/tests/model_fields_/test_embedded_model.py +++ b/tests/model_fields_/test_embedded_model.py @@ -24,6 +24,7 @@ Data, Holder, Library, + NestedData, ) from .utils import truncate_ms @@ -93,7 +94,16 @@ def test_pre_save(self): class QueryingTests(TestCase): @classmethod def setUpTestData(cls): - cls.objs = [Holder.objects.create(data=Data(integer=x)) for x in range(6)] + cls.objs = [ + Holder.objects.create( + data=Data( + integer=x, + decimal=f"{x}.5", + nested_data=NestedData(decimal=f"{x}.5"), + ) + ) + for x in range(6) + ] def test_exact(self): self.assertCountEqual(Holder.objects.filter(data__integer=3), [self.objs[3]]) @@ -113,6 +123,19 @@ def test_gte(self): def test_range(self): self.assertCountEqual(Holder.objects.filter(data__integer__range=(2, 4)), self.objs[2:5]) + def test_exact_decimal(self): + # EmbeddedModelField lookups call + # DatabaseOperations.adapt__field_value(). + self.assertCountEqual(Holder.objects.filter(data__decimal="3.5"), [self.objs[3]]) + + def test_lt_decimal(self): + self.assertCountEqual(Holder.objects.filter(data__decimal__lt="3"), self.objs[0:3]) + + def test_exact_decimal_nested(self): + self.assertCountEqual( + Holder.objects.filter(data__nested_data__decimal="3.5"), [self.objs[3]] + ) + def test_order_by_embedded_field(self): qs = Holder.objects.filter(data__integer__gt=3).order_by("-data__integer") self.assertSequenceEqual(qs, list(reversed(self.objs[4:]))) From cdacc51cac29ecd5e7d4e6de07765609709b6cfd Mon Sep 17 00:00:00 2001 From: Tim Graham Date: Tue, 6 May 2025 09:37:12 -0400 Subject: [PATCH 09/21] INTPYTHON-602 Fix QuerySet results of embedded model fields that have field converters Follow up to 4633a9eefd414b83f1b20b09194d5e96375c9b4d. --- django_mongodb_backend/operations.py | 4 +++- tests/model_fields_/test_embedded_model.py | 5 +++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/django_mongodb_backend/operations.py b/django_mongodb_backend/operations.py index bdf41bcf9..832c1db52 100644 --- a/django_mongodb_backend/operations.py +++ b/django_mongodb_backend/operations.py @@ -157,7 +157,9 @@ def convert_embeddedmodelfield_value(self, value, expression, connection): # Apply database converters to each field of the embedded model. 
for field in expression.output_field.embedded_model._meta.fields: field_expr = Expression(output_field=field) - converters = connection.ops.get_db_converters(field_expr) + converters = connection.ops.get_db_converters( + field_expr + ) + field_expr.get_db_converters(connection) for converter in converters: value[field.attname] = converter(value[field.attname], field_expr, connection) return value diff --git a/tests/model_fields_/test_embedded_model.py b/tests/model_fields_/test_embedded_model.py index 004eae00d..ec9f9dfc4 100644 --- a/tests/model_fields_/test_embedded_model.py +++ b/tests/model_fields_/test_embedded_model.py @@ -71,6 +71,11 @@ def test_save_load_null(self): obj = Holder.objects.get() self.assertIsNone(obj.data) + def test_save_load_json(self): + obj = Holder.objects.create(data=Data(json_value={"a": 1})) + obj.refresh_from_db() + self.assertEqual(obj.data.json_value, {"a": 1}) + def test_pre_save(self): """Field.pre_save() is called on embedded model fields.""" obj = Holder.objects.create(data=Data()) From 5726c74f4d9974ee757106f4d149f0192aecdd18 Mon Sep 17 00:00:00 2001 From: Tim Graham Date: Wed, 7 May 2025 10:34:47 -0400 Subject: [PATCH 10/21] Correct docs: QuerySet.bulk_update() is supported Added in 97769ba88e6951d8e343f8c3abeb8978d49b6f01. --- docs/source/ref/models/querysets.rst | 1 - docs/source/topics/known-issues.rst | 1 - 2 files changed, 2 deletions(-) diff --git a/docs/source/ref/models/querysets.rst b/docs/source/ref/models/querysets.rst index b1cfbf8b7..601e79d10 100644 --- a/docs/source/ref/models/querysets.rst +++ b/docs/source/ref/models/querysets.rst @@ -8,7 +8,6 @@ Supported ``QuerySet`` methods All of Django's :doc:`QuerySet methods ` are supported, except: - - :meth:`bulk_update() ` - :meth:`extra() ` - :meth:`prefetch_related() ` diff --git a/docs/source/topics/known-issues.rst b/docs/source/topics/known-issues.rst index 0ec4fc1b8..4779eb782 100644 --- a/docs/source/topics/known-issues.rst +++ b/docs/source/topics/known-issues.rst @@ -30,7 +30,6 @@ Querying - The following ``QuerySet`` methods aren't supported: - - :meth:`bulk_update() ` - :meth:`extra() ` - :meth:`prefetch_related() ` From a7a05569e878b89f1b31be88aa0239caf4dfc9c4 Mon Sep 17 00:00:00 2001 From: Tim Graham Date: Mon, 12 May 2025 08:56:35 -0400 Subject: [PATCH 11/21] Bump version to 5.1.0b3 --- django_mongodb_backend/__init__.py | 2 +- docs/source/releases/5.1.x.rst | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/django_mongodb_backend/__init__.py b/django_mongodb_backend/__init__.py index bc2eb46f6..3e5bbaf04 100644 --- a/django_mongodb_backend/__init__.py +++ b/django_mongodb_backend/__init__.py @@ -1,4 +1,4 @@ -__version__ = "5.1.0b3.dev0" +__version__ = "5.1.0b3" # Check Django compatibility before other imports which may fail if the # wrong version of Django is installed. diff --git a/docs/source/releases/5.1.x.rst b/docs/source/releases/5.1.x.rst index fac35eb16..1bb715aa8 100644 --- a/docs/source/releases/5.1.x.rst +++ b/docs/source/releases/5.1.x.rst @@ -5,7 +5,7 @@ Django MongoDB Backend 5.1.x 5.1.0 beta 3 ============ -*Unreleased* +*May 13, 2025* - Added support for a field's custom lookups and transforms in ``EmbeddedModelField``, e.g. 
``ArrayField``’s ``contains``, From b05c0100cd1cf3143fa51f2b8ffd0a017bec76a9 Mon Sep 17 00:00:00 2001 From: Jib Date: Wed, 14 May 2025 16:25:06 -0400 Subject: [PATCH 12/21] Fix release date --- docs/source/releases/5.1.x.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source/releases/5.1.x.rst b/docs/source/releases/5.1.x.rst index 1bb715aa8..e89e4e73f 100644 --- a/docs/source/releases/5.1.x.rst +++ b/docs/source/releases/5.1.x.rst @@ -5,7 +5,7 @@ Django MongoDB Backend 5.1.x 5.1.0 beta 3 ============ -*May 13, 2025* +*May 14, 2025* - Added support for a field's custom lookups and transforms in ``EmbeddedModelField``, e.g. ``ArrayField``’s ``contains``, From 353ff6a6fc0cdf640a22a05a335a5d3ca839f529 Mon Sep 17 00:00:00 2001 From: "mongodb-dbx-release-bot[bot]" <167856002+mongodb-dbx-release-bot[bot]@users.noreply.github.com> Date: Wed, 14 May 2025 20:32:14 +0000 Subject: [PATCH 13/21] BUMP 5.1.0b4.dev0 Signed-off-by: mongodb-dbx-release-bot[bot] <167856002+mongodb-dbx-release-bot[bot]@users.noreply.github.com> --- django_mongodb_backend/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/django_mongodb_backend/__init__.py b/django_mongodb_backend/__init__.py index 3e5bbaf04..c448b7a30 100644 --- a/django_mongodb_backend/__init__.py +++ b/django_mongodb_backend/__init__.py @@ -1,4 +1,4 @@ -__version__ = "5.1.0b3" +__version__ = "5.1.0b4.dev0" # Check Django compatibility before other imports which may fail if the # wrong version of Django is installed. From b42bfbee8d660dc12cd75271e8544275386403f1 Mon Sep 17 00:00:00 2001 From: Tim Graham Date: Fri, 4 Jul 2025 15:07:25 -0400 Subject: [PATCH 14/21] PYTHON-5430 Use the zizmor action --- .github/workflows/zizmor.yml | 13 +------------ 1 file changed, 1 insertion(+), 12 deletions(-) diff --git a/.github/workflows/zizmor.yml b/.github/workflows/zizmor.yml index 0fbdbd6dd..aca6da55d 100644 --- a/.github/workflows/zizmor.yml +++ b/.github/workflows/zizmor.yml @@ -17,16 +17,5 @@ jobs: uses: actions/checkout@v4 with: persist-credentials: false - - name: Setup Rust - uses: actions-rust-lang/setup-rust-toolchain@v1 - - name: Get zizmor - run: cargo install zizmor - name: Run zizmor - run: zizmor --format sarif . > results.sarif - env: - GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - name: Upload SARIF file - uses: github/codeql-action/upload-sarif@v3 - with: - sarif_file: results.sarif - category: zizmor + uses: zizmorcore/zizmor-action@1c7106082dbc1753372e3924b7da1b9417011a21 From 19a4a808ef3e4f98aed642835ea984bee47f64d9 Mon Sep 17 00:00:00 2001 From: Tim Graham Date: Thu, 15 May 2025 21:29:25 -0400 Subject: [PATCH 15/21] Fix database converter crash when loading models with a null ArrayField value --- django_mongodb_backend/operations.py | 2 ++ docs/source/releases/5.1.x.rst | 8 ++++++++ tests/model_fields_/models.py | 7 ++++++- tests/model_fields_/test_arrayfield.py | 3 ++- 4 files changed, 18 insertions(+), 2 deletions(-) diff --git a/django_mongodb_backend/operations.py b/django_mongodb_backend/operations.py index 832c1db52..4b54a3525 100644 --- a/django_mongodb_backend/operations.py +++ b/django_mongodb_backend/operations.py @@ -80,6 +80,8 @@ def adapt_timefield_value(self, value): def _get_arrayfield_converter(self, converter, *args, **kwargs): # Return a database converter that can be applied to a list of values. 
def convert_value(value, expression, connection): + if value is None: + return None return [converter(x, expression, connection) for x in value] return convert_value diff --git a/docs/source/releases/5.1.x.rst b/docs/source/releases/5.1.x.rst index e89e4e73f..67d988859 100644 --- a/docs/source/releases/5.1.x.rst +++ b/docs/source/releases/5.1.x.rst @@ -2,6 +2,14 @@ Django MongoDB Backend 5.1.x ============================ +5.1.0 beta 4 +============ + +*Unreleased* + +- Fixed crash when loading models with a null value for ``ArrayField``\s where + the ``base_field`` uses a database converter. + 5.1.0 beta 3 ============ diff --git a/tests/model_fields_/models.py b/tests/model_fields_/models.py index 2470f4bb8..9a4efc898 100644 --- a/tests/model_fields_/models.py +++ b/tests/model_fields_/models.py @@ -80,7 +80,12 @@ class NestedIntegerArrayModel(models.Model): class OtherTypesArrayModel(models.Model): ips = ArrayField(models.GenericIPAddressField(), default=list) uuids = ArrayField(models.UUIDField(), default=list) - decimals = ArrayField(models.DecimalField(max_digits=5, decimal_places=2), default=list) + decimals = ArrayField( + models.DecimalField(max_digits=5, decimal_places=2), + default=list, + null=True, + blank=True, + ) tags = ArrayField(TagField(), blank=True, null=True) json = ArrayField(models.JSONField(default=dict), default=list) diff --git a/tests/model_fields_/test_arrayfield.py b/tests/model_fields_/test_arrayfield.py index e3dd4e594..aecfe0074 100644 --- a/tests/model_fields_/test_arrayfield.py +++ b/tests/model_fields_/test_arrayfield.py @@ -195,10 +195,11 @@ def test_null_from_db_value_handling(self): instance = OtherTypesArrayModel.objects.create( ips=["192.168.0.1", "::1"], uuids=[uuid.uuid4()], - decimals=[decimal.Decimal(1.25), 1.75], + decimals=None, tags=None, ) instance.refresh_from_db() + self.assertIsNone(instance.decimals) self.assertIsNone(instance.tags) self.assertEqual(instance.json, []) From 79e78f6f21f1aa19d3c79e33ca69cdda92486a7f Mon Sep 17 00:00:00 2001 From: Tim Graham Date: Thu, 17 Jul 2025 09:11:22 -0400 Subject: [PATCH 16/21] Fix Trunc functions RecursionError crash on non-MongoDB databases --- django_mongodb_backend/functions.py | 5 ++++- docs/source/releases/5.1.x.rst | 2 ++ 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/django_mongodb_backend/functions.py b/django_mongodb_backend/functions.py index 9405e8711..c27af25bc 100644 --- a/django_mongodb_backend/functions.py +++ b/django_mongodb_backend/functions.py @@ -199,6 +199,9 @@ def trunc(self, compiler, connection): return {"$dateTrunc": lhs_mql} +_trunc_convert_value = TruncBase.convert_value + + def trunc_convert_value(self, value, expression, connection): if connection.vendor == "mongodb": # A custom TruncBase.convert_value() for MongoDB. @@ -223,7 +226,7 @@ def trunc_convert_value(self, value, expression, connection): # Truncate for Trunc(..., output_field=TimeField) value = value.time() return value - return self.convert_value(value, expression, connection) + return _trunc_convert_value(self, value, expression, connection) def trunc_date(self, compiler, connection): diff --git a/docs/source/releases/5.1.x.rst b/docs/source/releases/5.1.x.rst index 67d988859..8799ceef0 100644 --- a/docs/source/releases/5.1.x.rst +++ b/docs/source/releases/5.1.x.rst @@ -9,6 +9,8 @@ Django MongoDB Backend 5.1.x - Fixed crash when loading models with a null value for ``ArrayField``\s where the ``base_field`` uses a database converter. 
+- Fixed ``RecursionError`` when using ``Trunc`` database functions on non-MongoDB + databases. 5.1.0 beta 3 ============ From b722252f9375a146d87e4693a98b6482fad55061 Mon Sep 17 00:00:00 2001 From: Jib Date: Fri, 25 Jul 2025 21:49:03 -0400 Subject: [PATCH 17/21] Improve QuerySet performance by removing limit on server-side chunking (#347) --- django_mongodb_backend/compiler.py | 1 - docs/source/releases/5.1.x.rst | 1 + 2 files changed, 1 insertion(+), 1 deletion(-) diff --git a/django_mongodb_backend/compiler.py b/django_mongodb_backend/compiler.py index 1b4882eb4..8df8d9953 100644 --- a/django_mongodb_backend/compiler.py +++ b/django_mongodb_backend/compiler.py @@ -260,7 +260,6 @@ def execute_sql( else: return self._make_result(obj, self.columns) # result_type is MULTI - cursor.batch_size(chunk_size) result = self.cursor_iter(cursor, chunk_size, self.columns) if not chunked_fetch: # If using non-chunked reads, read data into memory. diff --git a/docs/source/releases/5.1.x.rst b/docs/source/releases/5.1.x.rst index 8799ceef0..b2ca4c180 100644 --- a/docs/source/releases/5.1.x.rst +++ b/docs/source/releases/5.1.x.rst @@ -11,6 +11,7 @@ Django MongoDB Backend 5.1.x the ``base_field`` uses a database converter. - Fixed ``RecursionError`` when using ``Trunc`` database functions on non-MongoDB databases. +- Improved ``QuerySet`` performance by removing low limit on server-side chunking. 5.1.0 beta 3 ============ From e9a3021fe3e2c9e2988c5c9fe0ced7e901cad00b Mon Sep 17 00:00:00 2001 From: Tim Graham Date: Tue, 29 Jul 2025 14:30:56 -0400 Subject: [PATCH 18/21] INTPYTHON-451 Remove database caching support This reverts commit 64b1c101301da03aea684dd5ae2073cf2c84ae49. --- django_mongodb_backend/cache.py | 216 ---- django_mongodb_backend/creation.py | 13 - django_mongodb_backend/management/__init__.py | 0 .../management/commands/__init__.py | 0 .../commands/createcachecollection.py | 50 - docs/source/index.rst | 5 - docs/source/ref/django-admin.rst | 28 - docs/source/ref/index.rst | 1 - docs/source/releases/5.1.x.rst | 6 +- docs/source/topics/cache.rst | 61 - docs/source/topics/index.rst | 1 - docs/source/topics/known-issues.rst | 10 +- tests/cache_/__init__.py | 0 tests/cache_/models.py | 13 - tests/cache_/tests.py | 1000 ----------------- 15 files changed, 8 insertions(+), 1396 deletions(-) delete mode 100644 django_mongodb_backend/cache.py delete mode 100644 django_mongodb_backend/management/__init__.py delete mode 100644 django_mongodb_backend/management/commands/__init__.py delete mode 100644 django_mongodb_backend/management/commands/createcachecollection.py delete mode 100644 docs/source/ref/django-admin.rst delete mode 100644 docs/source/topics/cache.rst delete mode 100644 tests/cache_/__init__.py delete mode 100644 tests/cache_/models.py delete mode 100644 tests/cache_/tests.py diff --git a/django_mongodb_backend/cache.py b/django_mongodb_backend/cache.py deleted file mode 100644 index 00b903afe..000000000 --- a/django_mongodb_backend/cache.py +++ /dev/null @@ -1,216 +0,0 @@ -import pickle -from datetime import datetime, timezone - -from django.core.cache.backends.base import DEFAULT_TIMEOUT, BaseCache -from django.core.cache.backends.db import Options -from django.db import connections, router -from django.utils.functional import cached_property -from pymongo import ASCENDING, DESCENDING, IndexModel, ReturnDocument -from pymongo.errors import DuplicateKeyError, OperationFailure - - -class MongoSerializer: - def __init__(self, protocol=None): - self.protocol = pickle.HIGHEST_PROTOCOL if 
protocol is None else protocol - - def dumps(self, obj): - # For better incr() and decr() atomicity, don't pickle integers. - # Using type() rather than isinstance() matches only integers and not - # subclasses like bool. - if type(obj) is int: # noqa: E721 - return obj - return pickle.dumps(obj, self.protocol) - - def loads(self, data): - try: - return int(data) - except (ValueError, TypeError): - return pickle.loads(data) # noqa: S301 - - -class MongoDBCache(BaseCache): - pickle_protocol = pickle.HIGHEST_PROTOCOL - - def __init__(self, collection_name, params): - super().__init__(params) - self._collection_name = collection_name - - class CacheEntry: - _meta = Options(collection_name) - - self.cache_model_class = CacheEntry - - def create_indexes(self): - expires_index = IndexModel("expires_at", expireAfterSeconds=0) - key_index = IndexModel("key", unique=True) - self.collection_for_write.create_indexes([expires_index, key_index]) - - @cached_property - def serializer(self): - return MongoSerializer(self.pickle_protocol) - - @property - def collection_for_read(self): - db = router.db_for_read(self.cache_model_class) - return connections[db].get_collection(self._collection_name) - - @property - def collection_for_write(self): - db = router.db_for_write(self.cache_model_class) - return connections[db].get_collection(self._collection_name) - - def _filter_expired(self, expired=False): - """ - Return MQL to exclude expired entries (needed because the MongoDB - daemon does not remove expired entries precisely when they expire). - If expired=True, return MQL to include only expired entries. - """ - op = "$lt" if expired else "$gte" - return {"expires_at": {op: datetime.utcnow()}} - - def get_backend_timeout(self, timeout=DEFAULT_TIMEOUT): - if timeout is None: - return datetime.max - timestamp = super().get_backend_timeout(timeout) - return datetime.fromtimestamp(timestamp, tz=timezone.utc) - - def get(self, key, default=None, version=None): - return self.get_many([key], version).get(key, default) - - def get_many(self, keys, version=None): - if not keys: - return {} - keys_map = {self.make_and_validate_key(key, version=version): key for key in keys} - with self.collection_for_read.find( - {"key": {"$in": tuple(keys_map)}, **self._filter_expired(expired=False)} - ) as cursor: - return {keys_map[row["key"]]: self.serializer.loads(row["value"]) for row in cursor} - - def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None): - key = self.make_and_validate_key(key, version=version) - num = self.collection_for_write.count_documents({}, hint="_id_") - if num >= self._max_entries: - self._cull(num) - self.collection_for_write.update_one( - {"key": key}, - { - "$set": { - "key": key, - "value": self.serializer.dumps(value), - "expires_at": self.get_backend_timeout(timeout), - } - }, - upsert=True, - ) - - def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None): - key = self.make_and_validate_key(key, version=version) - num = self.collection_for_write.count_documents({}, hint="_id_") - if num >= self._max_entries: - self._cull(num) - try: - self.collection_for_write.update_one( - {"key": key, **self._filter_expired(expired=True)}, - { - "$set": { - "key": key, - "value": self.serializer.dumps(value), - "expires_at": self.get_backend_timeout(timeout), - } - }, - upsert=True, - ) - except DuplicateKeyError: - return False - return True - - def _cull(self, num): - if self._cull_frequency == 0: - self.clear() - else: - # The fraction of entries that are culled when MAX_ENTRIES is - # 
reached is 1 / CULL_FREQUENCY. For example, in the default case - # of CULL_FREQUENCY=3, 2/3 of the entries are kept, thus `keep_num` - # will be 2/3 of the current number of entries. - keep_num = num - num // self._cull_frequency - try: - # Find the first cache entry beyond the retention limit, - # culling entries that expire the soonest. - deleted_from = next( - self.collection_for_write.aggregate( - [ - {"$sort": {"expires_at": DESCENDING, "key": ASCENDING}}, - {"$skip": keep_num}, - {"$limit": 1}, - {"$project": {"key": 1, "expires_at": 1}}, - ] - ) - ) - except StopIteration: - # If no entries are found, there is nothing to delete. It may - # happen if the database removes expired entries between the - # query to get `num` and the query to get `deleted_from`. - pass - else: - # Cull the cache. - self.collection_for_write.delete_many( - { - "$or": [ - # Delete keys that expire before `deleted_from`... - {"expires_at": {"$lt": deleted_from["expires_at"]}}, - # and the entries that share an expiration with - # `deleted_from` but are alphabetically after it - # (per the same sorting to fetch `deleted_from`). - { - "$and": [ - {"expires_at": deleted_from["expires_at"]}, - {"key": {"$gte": deleted_from["key"]}}, - ] - }, - ] - } - ) - - def touch(self, key, timeout=DEFAULT_TIMEOUT, version=None): - key = self.make_and_validate_key(key, version=version) - res = self.collection_for_write.update_one( - {"key": key}, {"$set": {"expires_at": self.get_backend_timeout(timeout)}} - ) - return res.matched_count > 0 - - def incr(self, key, delta=1, version=None): - serialized_key = self.make_and_validate_key(key, version=version) - try: - updated = self.collection_for_write.find_one_and_update( - {"key": serialized_key, **self._filter_expired(expired=False)}, - {"$inc": {"value": delta}}, - return_document=ReturnDocument.AFTER, - ) - except OperationFailure as exc: - method_name = "incr" if delta >= 1 else "decr" - raise TypeError(f"Cannot apply {method_name}() to a non-numeric value.") from exc - if updated is None: - raise ValueError(f"Key '{key}' not found.") from None - return updated["value"] - - def delete(self, key, version=None): - return self._delete_many([key], version) - - def delete_many(self, keys, version=None): - self._delete_many(keys, version) - - def _delete_many(self, keys, version=None): - if not keys: - return False - keys = tuple(self.make_and_validate_key(key, version=version) for key in keys) - return bool(self.collection_for_write.delete_many({"key": {"$in": keys}}).deleted_count) - - def has_key(self, key, version=None): - key = self.make_and_validate_key(key, version=version) - num = self.collection_for_read.count_documents( - {"key": key, **self._filter_expired(expired=False)} - ) - return num > 0 - - def clear(self): - self.collection_for_write.delete_many({}) diff --git a/django_mongodb_backend/creation.py b/django_mongodb_backend/creation.py index 50a648c15..76d9e4b4f 100644 --- a/django_mongodb_backend/creation.py +++ b/django_mongodb_backend/creation.py @@ -1,10 +1,6 @@ from django.conf import settings from django.db.backends.base.creation import BaseDatabaseCreation -from django_mongodb_backend.management.commands.createcachecollection import ( - Command as CreateCacheCollection, -) - class DatabaseCreation(BaseDatabaseCreation): def _execute_create_test_db(self, cursor, parameters, keepdb=False): @@ -20,12 +16,3 @@ def _destroy_test_db(self, test_database_name, verbosity): for collection in self.connection.introspection.table_names(): if not 
collection.startswith("system."): self.connection.database.drop_collection(collection) - - def create_test_db(self, *args, **kwargs): - test_database_name = super().create_test_db(*args, **kwargs) - # Not using call_command() avoids the requirement to put - # "django_mongodb_backend" in INSTALLED_APPS. - CreateCacheCollection().handle( - database=self.connection.alias, verbosity=kwargs["verbosity"] - ) - return test_database_name diff --git a/django_mongodb_backend/management/__init__.py b/django_mongodb_backend/management/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/django_mongodb_backend/management/commands/__init__.py b/django_mongodb_backend/management/commands/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/django_mongodb_backend/management/commands/createcachecollection.py b/django_mongodb_backend/management/commands/createcachecollection.py deleted file mode 100644 index 389c24335..000000000 --- a/django_mongodb_backend/management/commands/createcachecollection.py +++ /dev/null @@ -1,50 +0,0 @@ -from django.conf import settings -from django.core.cache import caches -from django.core.management.base import BaseCommand -from django.db import DEFAULT_DB_ALIAS, connections, router - -from django_mongodb_backend.cache import MongoDBCache - - -class Command(BaseCommand): - help = "Creates the collections needed to use the MongoDB cache backend." - requires_system_checks = [] - - def add_arguments(self, parser): - parser.add_argument( - "args", - metavar="collection_name", - nargs="*", - help="Optional collections names. Otherwise, settings.CACHES is " - "used to find cache collections.", - ) - parser.add_argument( - "--database", - default=DEFAULT_DB_ALIAS, - help="Nominates a database onto which the cache collections will be " - 'installed. Defaults to the "default" database.', - ) - - def handle(self, *collection_names, **options): - db = options["database"] - self.verbosity = options["verbosity"] - if collection_names: - # Legacy behavior, collection_name specified as argument - for collection_name in collection_names: - self.check_collection(db, collection_name) - else: - for cache_alias in settings.CACHES: - cache = caches[cache_alias] - if isinstance(cache, MongoDBCache): - self.check_collection(db, cache._collection_name) - - def check_collection(self, database, collection_name): - cache = MongoDBCache(collection_name, {}) - if not router.allow_migrate_model(database, cache.cache_model_class): - return - connection = connections[database] - if cache._collection_name in connection.introspection.table_names(): - if self.verbosity > 0: - self.stdout.write("Cache collection '%s' already exists." % cache._collection_name) - return - cache.create_indexes() diff --git a/docs/source/index.rst b/docs/source/index.rst index bdd981625..5d7ac4433 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -49,11 +49,6 @@ Forms - :doc:`ref/forms` -Core functionalities -==================== - -- :doc:`topics/cache` - Miscellaneous ============= diff --git a/docs/source/ref/django-admin.rst b/docs/source/ref/django-admin.rst deleted file mode 100644 index 34e7a45ba..000000000 --- a/docs/source/ref/django-admin.rst +++ /dev/null @@ -1,28 +0,0 @@ -=================== -Management commands -=================== - -Django MongoDB Backend includes some :doc:`Django management commands -`. 
- -Required configuration -====================== - -To make these commands available, you must include ``"django_mongodb_backend"`` -in the :setting:`INSTALLED_APPS` setting. - -Available commands -================== - -``createcachecollection`` -------------------------- - -.. django-admin:: createcachecollection - -Creates the cache collection for use with the :doc:`database cache backend -` using the information from your :setting:`CACHES` setting. - -.. django-admin-option:: --database DATABASE - -Specifies the database in which the cache collection(s) will be created. -Defaults to ``default``. diff --git a/docs/source/ref/index.rst b/docs/source/ref/index.rst index 25950937b..08fac9240 100644 --- a/docs/source/ref/index.rst +++ b/docs/source/ref/index.rst @@ -7,5 +7,4 @@ API reference models/index forms - django-admin utils diff --git a/docs/source/releases/5.1.x.rst b/docs/source/releases/5.1.x.rst index b2ca4c180..460daa4ec 100644 --- a/docs/source/releases/5.1.x.rst +++ b/docs/source/releases/5.1.x.rst @@ -7,6 +7,10 @@ Django MongoDB Backend 5.1.x *Unreleased* +- Backward-incompatible: Removed support for database caching as the MongoDB security + team considers the cache backend's ``pickle`` encoding of cached values a + vulnerability. If an attacker compromises the database, they could run arbitrary + commands on the application server. - Fixed crash when loading models with a null value for ``ArrayField``\s where the ``base_field`` uses a database converter. - Fixed ``RecursionError`` when using ``Trunc`` database functions on non-MongoDB @@ -39,7 +43,7 @@ Django MongoDB Backend 5.1.x :attr:`~.ArrayField.size` parameter is renamed to :attr:`~.ArrayField.max_size`. The :attr:`~.ArrayField.size` parameter is now used to enforce fixed-length arrays. -- Added support for :doc:`database caching `. +- Added support for database caching (later removed in beta 4). - Fixed ``QuerySet.raw_aggregate()`` field initialization when the document key order doesn't match the order of the model's fields. diff --git a/docs/source/topics/cache.rst b/docs/source/topics/cache.rst deleted file mode 100644 index 881e1b78b..000000000 --- a/docs/source/topics/cache.rst +++ /dev/null @@ -1,61 +0,0 @@ -================ -Database caching -================ - -.. class:: django_mongodb_backend.cache.MongoDBCache - -You can configure :doc:`Django's caching API ` to store -its data in MongoDB. - -To use a database collection as your cache backend: - -* Set :setting:`BACKEND ` to - ``django_mongodb_backend.cache.MongoDBCache`` - -* Set :setting:`LOCATION ` to ``collection_name``, the name of - the MongoDB collection. This name can be whatever you want, as long as it's a - valid collection name that's not already being used in your database. - -In this example, the cache collection's name is ``my_cache_collection``:: - - CACHES = { - "default": { - "BACKEND": "django_mongodb_backend.cache.MongoDBCache", - "LOCATION": "my_cache_collection", - }, - } - -Unlike Django's built-in database cache backend, this backend supports -automatic culling of expired entries at the database level. - -In addition, the cache is culled based on ``CULL_FREQUENCY`` when ``add()`` -or ``set()`` is called, if ``MAX_ENTRIES`` is exceeded. See -:ref:`django:cache_arguments` for an explanation of these two options. - -Creating the cache collection -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -Before using the database cache, you must create the cache collection with this -command: - -.. 
code-block:: shell - - python manage.py createcachecollection - -.. admonition:: Didn't work? - - If you get the error ``Unknown command: 'createcachecollection'``, ensure - ``"django_mongodb_backend"`` is in your :setting:`INSTALLED_APPS` setting. - -This creates a collection in your database with the proper indexes. The name of -the collection is taken from :setting:`LOCATION `. - -If you are using multiple database caches, :djadmin:`createcachecollection` -creates one collection for each cache. - -If you are using multiple databases, :djadmin:`createcachecollection` observes -the ``allow_migrate()`` method of your database routers (see the -:ref:`database-caching-multiple-databases` section of Django's caching docs). - -:djadmin:`createcachecollection` won't touch an existing collection. It will -only create missing collections. diff --git a/docs/source/topics/index.rst b/docs/source/topics/index.rst index 47e0c6dc0..63ff9a250 100644 --- a/docs/source/topics/index.rst +++ b/docs/source/topics/index.rst @@ -8,6 +8,5 @@ know: .. toctree:: :maxdepth: 2 - cache embedded-models known-issues diff --git a/docs/source/topics/known-issues.rst b/docs/source/topics/known-issues.rst index 4779eb782..dc99dcd9a 100644 --- a/docs/source/topics/known-issues.rst +++ b/docs/source/topics/known-issues.rst @@ -94,10 +94,6 @@ Due to the lack of ability to introspect MongoDB collection schema, Caching ======= -:doc:`Database caching ` uses this library's -:djadmin:`createcachecollection` command rather Django's SQL-specific -:djadmin:`createcachetable` command. - -Secondly, you must use the :class:`django_mongodb_backend.cache.MongoDBCache` -backend rather than Django's built-in database cache backend, -``django.core.cache.backends.db.DatabaseCache``. +:ref:`Database caching ` is not supported since Django's built-in +database cache backend requires SQL. A custom cache backend for MongoDB may be provided +in the future. 
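Projects that had ``CACHES`` pointed at the removed
``django_mongodb_backend.cache.MongoDBCache`` backend need to move to one of
Django's supported cache backends after applying this patch. A minimal sketch,
assuming a per-process local-memory cache is acceptable for development and
that a Redis server at the example URL is available for production; both
backend paths are Django's own, while the alias, URL, and comments are
illustrative only::

    # settings.py -- illustrative replacement for the removed MongoDBCache backend.
    CACHES = {
        "default": {
            # Per-process, in-memory cache; enough for development and tests.
            "BACKEND": "django.core.cache.backends.locmem.LocMemCache",
        },
        # Example production alternative (assumes a reachable Redis server):
        # "default": {
        #     "BACKEND": "django.core.cache.backends.redis.RedisCache",
        #     "LOCATION": "redis://127.0.0.1:6379",
        # },
    }

Unlike the removed backend, neither option requires ``createcachecollection``
or any other management command before first use.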
diff --git a/tests/cache_/__init__.py b/tests/cache_/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/tests/cache_/models.py b/tests/cache_/models.py deleted file mode 100644 index e0aa6ab4d..000000000 --- a/tests/cache_/models.py +++ /dev/null @@ -1,13 +0,0 @@ -from django.db import models -from django.utils import timezone - - -def expensive_calculation(): - expensive_calculation.num_runs += 1 - return timezone.now() - - -class Poll(models.Model): - question = models.CharField(max_length=200) - answer = models.CharField(max_length=200) - pub_date = models.DateTimeField("date published", default=expensive_calculation) diff --git a/tests/cache_/tests.py b/tests/cache_/tests.py deleted file mode 100644 index c28b549e5..000000000 --- a/tests/cache_/tests.py +++ /dev/null @@ -1,1000 +0,0 @@ -"""These tests are forked from Django's tests/cache/tests.py.""" -import os -import pickle -import time -from functools import wraps -from unittest import mock - -from bson import SON -from django.conf import settings -from django.core import management -from django.core.cache import DEFAULT_CACHE_ALIAS, CacheKeyWarning, cache, caches -from django.core.cache.backends.base import InvalidCacheBackendError -from django.http import HttpResponse -from django.middleware.cache import FetchFromCacheMiddleware, UpdateCacheMiddleware -from django.test import RequestFactory, TestCase, modify_settings, override_settings - -from .models import Poll, expensive_calculation - -KEY_ERRORS_WITH_MEMCACHED_MSG = ( - "Cache key contains characters that will cause errors if used with memcached: %r" -) - - -def f(): - return 42 - - -class C: - def m(n): - return 24 - - -class Unpicklable: - def __getstate__(self): - raise pickle.PickleError() - - -def empty_response(request): # noqa: ARG001 - return HttpResponse() - - -def retry(retries=3, delay=1): - def decorator(func): - @wraps(func) - def wrapper(*args, **kwargs): - attempts = 0 - while attempts < retries: - try: - return func(*args, **kwargs) - except AssertionError: - attempts += 1 - if attempts >= retries: - raise - time.sleep(delay) - return None - - return wrapper - - return decorator - - -def custom_key_func(key, key_prefix, version): - "A customized cache key function" - return "CUSTOM-" + "-".join([key_prefix, str(version), key]) - - -_caches_setting_base = { - "default": {}, - "prefix": {"KEY_PREFIX": f"cacheprefix{os.getpid()}"}, - "v2": {"VERSION": 2}, - "custom_key": {"KEY_FUNCTION": custom_key_func}, - "custom_key2": {"KEY_FUNCTION": "cache_.tests.custom_key_func"}, - "cull": {"OPTIONS": {"MAX_ENTRIES": 30}}, - "zero_cull": {"OPTIONS": {"CULL_FREQUENCY": 0, "MAX_ENTRIES": 30}}, -} - - -def caches_setting_for_tests(base=None, exclude=None, **params): - # `base` is used to pull in the memcached config from the original settings, - # `exclude` is a set of cache names denoting which `_caches_setting_base` keys - # should be omitted. - # `params` are test specific overrides and `_caches_settings_base` is the - # base config for the tests. 
- # This results in the following search order: - # params -> _caches_setting_base -> base - base = base or {} - exclude = exclude or set() - setting = {k: base.copy() for k in _caches_setting_base if k not in exclude} - for key, cache_params in setting.items(): - cache_params.update(_caches_setting_base[key]) - cache_params.update(params) - return setting - - -@override_settings( - CACHES=caches_setting_for_tests( - BACKEND="django_mongodb_backend.cache.MongoDBCache", - # Spaces are used in the name to ensure quoting/escaping works. - LOCATION="test cache collection", - ), -) -@modify_settings( - INSTALLED_APPS={"prepend": "django_mongodb_backend"}, -) -class CacheTests(TestCase): - factory = RequestFactory() - incr_decr_type_error_msg = "Cannot apply %s() to a non-numeric value." - - def setUp(self): - # The super calls needs to happen first for the settings override. - super().setUp() - self.create_cache_collection() - self.addCleanup(self.drop_collection) - - def create_cache_collection(self): - management.call_command("createcachecollection", verbosity=0) - - def drop_collection(self): - cache.collection_for_write.drop() - - def test_simple(self): - # Simple cache set/get works - cache.set("key", "value") - self.assertEqual(cache.get("key"), "value") - - def test_default_used_when_none_is_set(self): - """If None is cached, get() returns it instead of the default.""" - cache.set("key_default_none", None) - self.assertIsNone(cache.get("key_default_none", default="default")) - - def test_add(self): - # A key can be added to a cache - self.assertIs(cache.add("addkey1", "value"), True) - self.assertIs(cache.add("addkey1", "newvalue"), False) - self.assertEqual(cache.get("addkey1"), "value") - - def test_prefix(self): - # Test for same cache key conflicts between shared backend - cache.set("somekey", "value") - - # should not be set in the prefixed cache - self.assertIs(caches["prefix"].has_key("somekey"), False) - - caches["prefix"].set("somekey", "value2") - - self.assertEqual(cache.get("somekey"), "value") - self.assertEqual(caches["prefix"].get("somekey"), "value2") - - def test_non_existent(self): - """Nonexistent cache keys return as None/default.""" - self.assertIsNone(cache.get("does_not_exist")) - self.assertEqual(cache.get("does_not_exist", "bang!"), "bang!") - - def test_get_many(self): - # Multiple cache keys can be returned using get_many - cache.set_many({"a": "a", "b": "b", "c": "c", "d": "d"}) - self.assertEqual(cache.get_many(["a", "c", "d"]), {"a": "a", "c": "c", "d": "d"}) - self.assertEqual(cache.get_many(["a", "b", "e"]), {"a": "a", "b": "b"}) - self.assertEqual(cache.get_many(iter(["a", "b", "e"])), {"a": "a", "b": "b"}) - cache.set_many({"x": None, "y": 1}) - self.assertEqual(cache.get_many(["x", "y"]), {"x": None, "y": 1}) - - def test_delete(self): - # Cache keys can be deleted - cache.set_many({"key1": "spam", "key2": "eggs"}) - self.assertEqual(cache.get("key1"), "spam") - self.assertIs(cache.delete("key1"), True) - self.assertIsNone(cache.get("key1")) - self.assertEqual(cache.get("key2"), "eggs") - - def test_delete_nonexistent(self): - self.assertIs(cache.delete("nonexistent_key"), False) - - def test_has_key(self): - # The cache can be inspected for cache keys - cache.set("hello1", "goodbye1") - self.assertIs(cache.has_key("hello1"), True) - self.assertIs(cache.has_key("goodbye1"), False) - cache.set("no_expiry", "here", None) - self.assertIs(cache.has_key("no_expiry"), True) - cache.set("null", None) - self.assertIs(cache.has_key("null"), True) - - def 
test_in(self): - # The in operator can be used to inspect cache contents - cache.set("hello2", "goodbye2") - self.assertIn("hello2", cache) - self.assertNotIn("goodbye2", cache) - cache.set("null", None) - self.assertIn("null", cache) - - def test_incr(self): - # Cache values can be incremented - cache.set("answer", 41) - self.assertEqual(cache.incr("answer"), 42) - self.assertEqual(cache.get("answer"), 42) - self.assertEqual(cache.incr("answer", 10), 52) - self.assertEqual(cache.get("answer"), 52) - self.assertEqual(cache.incr("answer", -10), 42) - with self.assertRaisesMessage(ValueError, "Key 'does_not_exist' not found."): - cache.incr("does_not_exist") - with self.assertRaisesMessage(ValueError, "Key 'does_not_exist' not found."): - cache.incr("does_not_exist", -1) - cache.set("null", None) - with self.assertRaisesMessage(TypeError, self.incr_decr_type_error_msg % "incr"): - cache.incr("null") - - def test_decr(self): - # Cache values can be decremented - cache.set("answer", 43) - self.assertEqual(cache.decr("answer"), 42) - self.assertEqual(cache.get("answer"), 42) - self.assertEqual(cache.decr("answer", 10), 32) - self.assertEqual(cache.get("answer"), 32) - self.assertEqual(cache.decr("answer", -10), 42) - with self.assertRaisesMessage(ValueError, "Key 'does_not_exist' not found."): - cache.decr("does_not_exist") - with self.assertRaisesMessage(ValueError, "Key 'does_not_exist' not found."): - cache.incr("does_not_exist", -1) - cache.set("null", None) - with self.assertRaisesMessage(TypeError, self.incr_decr_type_error_msg % "decr"): - cache.decr("null") - - def test_close(self): - self.assertTrue(hasattr(cache, "close")) - cache.close() - - def test_data_types(self): - # Many different data types can be cached - tests = { - "string": "this is a string", - "int": 42, - "bool": True, - "list": [1, 2, 3, 4], - "tuple": (1, 2, 3, 4), - "dict": {"A": 1, "B": 2}, - "function": f, - "class": C, - } - for key, value in tests.items(): - with self.subTest(key=key): - cache.set(key, value) - self.assertEqual(cache.get(key), value) - - def test_cache_read_for_model_instance(self): - # Don't want fields with callable as default to be called on cache read - expensive_calculation.num_runs = 0 - Poll.objects.all().delete() - my_poll = Poll.objects.create(question="Well?") - self.assertEqual(Poll.objects.count(), 1) - pub_date = my_poll.pub_date - cache.set("question", my_poll) - cached_poll = cache.get("question") - self.assertEqual(cached_poll.pub_date, pub_date) - # We only want the default expensive calculation run once - self.assertEqual(expensive_calculation.num_runs, 1) - - def test_cache_write_for_model_instance_with_deferred(self): - # Don't want fields with callable as default to be called on cache write - expensive_calculation.num_runs = 0 - Poll.objects.all().delete() - Poll.objects.create(question="What?") - self.assertEqual(expensive_calculation.num_runs, 1) - defer_qs = Poll.objects.defer("question") - self.assertEqual(defer_qs.count(), 1) - self.assertEqual(expensive_calculation.num_runs, 1) - cache.set("deferred_queryset", defer_qs) - # cache set should not re-evaluate default functions - self.assertEqual(expensive_calculation.num_runs, 1) - - def test_cache_read_for_model_instance_with_deferred(self): - # Don't want fields with callable as default to be called on cache read - expensive_calculation.num_runs = 0 - Poll.objects.all().delete() - Poll.objects.create(question="What?") - self.assertEqual(expensive_calculation.num_runs, 1) - defer_qs = Poll.objects.defer("question") - 
self.assertEqual(defer_qs.count(), 1) - cache.set("deferred_queryset", defer_qs) - self.assertEqual(expensive_calculation.num_runs, 1) - runs_before_cache_read = expensive_calculation.num_runs - cache.get("deferred_queryset") - # We only want the default expensive calculation run on creation and set - self.assertEqual(expensive_calculation.num_runs, runs_before_cache_read) - - def test_expiration(self): - # Cache values can be set to expire - cache.set("expire1", "very quickly", 1) - cache.set("expire2", "very quickly", 1) - cache.set("expire3", "very quickly", 1) - - time.sleep(2) - self.assertIsNone(cache.get("expire1")) - - self.assertIs(cache.add("expire2", "newvalue"), True) - self.assertEqual(cache.get("expire2"), "newvalue") - self.assertIs(cache.has_key("expire3"), False) - - @retry() - def test_touch(self): - # cache.touch() updates the timeout. - cache.set("expire1", "very quickly", timeout=1) - self.assertIs(cache.touch("expire1", timeout=4), True) - time.sleep(2) - self.assertIs(cache.has_key("expire1"), True) - time.sleep(3) - self.assertIs(cache.has_key("expire1"), False) - # cache.touch() works without the timeout argument. - cache.set("expire1", "very quickly", timeout=1) - self.assertIs(cache.touch("expire1"), True) - time.sleep(2) - self.assertIs(cache.has_key("expire1"), True) - - self.assertIs(cache.touch("nonexistent"), False) - - def test_unicode(self): - # Unicode values can be cached - stuff = { - "ascii": "ascii_value", - "unicode_ascii": "Iñtërnâtiônàlizætiøn1", - "Iñtërnâtiônàlizætiøn": "Iñtërnâtiônàlizætiøn2", - "ascii2": {"x": 1}, - } - # Test `set` - for key, value in stuff.items(): - with self.subTest(key=key): - cache.set(key, value) - self.assertEqual(cache.get(key), value) - - # Test `add` - for key, value in stuff.items(): - with self.subTest(key=key): - self.assertIs(cache.delete(key), True) - self.assertIs(cache.add(key, value), True) - self.assertEqual(cache.get(key), value) - - # Test `set_many` - for key in stuff: - self.assertIs(cache.delete(key), True) - cache.set_many(stuff) - for key, value in stuff.items(): - with self.subTest(key=key): - self.assertEqual(cache.get(key), value) - - def test_binary_string(self): - # Binary strings should be cacheable - from zlib import compress, decompress - - value = "value_to_be_compressed" - compressed_value = compress(value.encode()) - - # Test set - cache.set("binary1", compressed_value) - compressed_result = cache.get("binary1") - self.assertEqual(compressed_value, compressed_result) - self.assertEqual(value, decompress(compressed_result).decode()) - - # Test add - self.assertIs(cache.add("binary1-add", compressed_value), True) - compressed_result = cache.get("binary1-add") - self.assertEqual(compressed_value, compressed_result) - self.assertEqual(value, decompress(compressed_result).decode()) - - # Test set_many - cache.set_many({"binary1-set_many": compressed_value}) - compressed_result = cache.get("binary1-set_many") - self.assertEqual(compressed_value, compressed_result) - self.assertEqual(value, decompress(compressed_result).decode()) - - def test_set_many(self): - # Multiple keys can be set using set_many - cache.set_many({"key1": "spam", "key2": "eggs"}) - self.assertEqual(cache.get("key1"), "spam") - self.assertEqual(cache.get("key2"), "eggs") - - def test_set_many_returns_empty_list_on_success(self): - """set_many() returns an empty list when all keys are inserted.""" - failing_keys = cache.set_many({"key1": "spam", "key2": "eggs"}) - self.assertEqual(failing_keys, []) - - def 
test_set_many_expiration(self): - # set_many takes a second ``timeout`` parameter - cache.set_many({"key1": "spam", "key2": "eggs"}, 1) - time.sleep(2) - self.assertIsNone(cache.get("key1")) - self.assertIsNone(cache.get("key2")) - - def test_set_many_empty_data(self): - self.assertEqual(cache.set_many({}), []) - - def test_delete_many(self): - # Multiple keys can be deleted using delete_many - cache.set_many({"key1": "spam", "key2": "eggs", "key3": "ham"}) - cache.delete_many(["key1", "key2"]) - self.assertIsNone(cache.get("key1")) - self.assertIsNone(cache.get("key2")) - self.assertEqual(cache.get("key3"), "ham") - - def test_delete_many_no_keys(self): - self.assertIsNone(cache.delete_many([])) - - def test_clear(self): - # The cache can be emptied using clear - cache.set_many({"key1": "spam", "key2": "eggs"}) - cache.clear() - self.assertIsNone(cache.get("key1")) - self.assertIsNone(cache.get("key2")) - - def test_long_timeout(self): - """ - Follow memcached's convention where a timeout greater than 30 days is - treated as an absolute expiration timestamp instead of a relative - offset (#12399). - """ - cache.set("key1", "eggs", 60 * 60 * 24 * 30 + 1) # 30 days + 1 second - self.assertEqual(cache.get("key1"), "eggs") - - self.assertIs(cache.add("key2", "ham", 60 * 60 * 24 * 30 + 1), True) - self.assertEqual(cache.get("key2"), "ham") - - cache.set_many({"key3": "sausage", "key4": "lobster bisque"}, 60 * 60 * 24 * 30 + 1) - self.assertEqual(cache.get("key3"), "sausage") - self.assertEqual(cache.get("key4"), "lobster bisque") - - @retry() - def test_forever_timeout(self): - """ - Passing in None into timeout results in a value that is cached forever - """ - cache.set("key1", "eggs", None) - self.assertEqual(cache.get("key1"), "eggs") - - self.assertIs(cache.add("key2", "ham", None), True) - self.assertEqual(cache.get("key2"), "ham") - self.assertIs(cache.add("key1", "new eggs", None), False) - self.assertEqual(cache.get("key1"), "eggs") - - cache.set_many({"key3": "sausage", "key4": "lobster bisque"}, None) - self.assertEqual(cache.get("key3"), "sausage") - self.assertEqual(cache.get("key4"), "lobster bisque") - - cache.set("key5", "belgian fries", timeout=1) - self.assertIs(cache.touch("key5", timeout=None), True) - time.sleep(2) - self.assertEqual(cache.get("key5"), "belgian fries") - - def test_zero_timeout(self): - """ - Passing in zero into timeout results in a value that is not cached - """ - cache.set("key1", "eggs", 0) - self.assertIsNone(cache.get("key1")) - - self.assertIs(cache.add("key2", "ham", 0), True) - self.assertIsNone(cache.get("key2")) - - cache.set_many({"key3": "sausage", "key4": "lobster bisque"}, 0) - self.assertIsNone(cache.get("key3")) - self.assertIsNone(cache.get("key4")) - - cache.set("key5", "belgian fries", timeout=5) - self.assertIs(cache.touch("key5", timeout=0), True) - self.assertIsNone(cache.get("key5")) - - def test_float_timeout(self): - # Make sure a timeout given as a float doesn't crash anything. - cache.set("key1", "spam", 100.2) - self.assertEqual(cache.get("key1"), "spam") - - def _perform_cull_test(self, cull_cache_name, initial_count, final_count): - try: - cull_cache = caches[cull_cache_name] - except InvalidCacheBackendError: - self.skipTest("Culling isn't implemented.") - - # Create initial cache key entries. This will overflow the cache, - # causing a cull. - for i in range(1, initial_count): - cull_cache.set("cull%d" % i, "value", 1000) - count = 0 - # Count how many keys are left in the cache. 
- for i in range(1, initial_count): - if cull_cache.has_key("cull%d" % i): - count += 1 - self.assertEqual(count, final_count) - - def test_cull(self): - self._perform_cull_test("cull", 50, 29) - - def test_zero_cull(self): - self._perform_cull_test("zero_cull", 50, 19) - - def test_cull_delete_when_store_empty(self): - try: - cull_cache = caches["cull"] - except InvalidCacheBackendError: - self.skipTest("Culling isn't implemented.") - old_max_entries = cull_cache._max_entries - # Force _cull to delete on first cached record. - cull_cache._max_entries = -1 - try: - cull_cache.set("force_cull_delete", "value", 1000) - self.assertIs(cull_cache.has_key("force_cull_delete"), True) - finally: - cull_cache._max_entries = old_max_entries - - def _perform_invalid_key_test(self, key, expected_warning, key_func=None): - """ - All the builtin backends should warn (except memcached that should - error) on keys that would be refused by memcached. This encourages - portable caching code without making it too difficult to use production - backends with more liberal key rules. Refs #6447. - """ - - # mimic custom ``make_key`` method being defined since the default will - # never show the below warnings - def func(key, *args): # noqa: ARG001 - return key - - old_func = cache.key_func - cache.key_func = key_func or func - - tests = [ - ("add", [key, 1]), - ("get", [key]), - ("set", [key, 1]), - ("incr", [key]), - ("decr", [key]), - ("touch", [key]), - ("delete", [key]), - ("get_many", [[key, "b"]]), - ("set_many", [{key: 1, "b": 2}]), - ("delete_many", [[key, "b"]]), - ] - try: - for operation, args in tests: - with self.subTest(operation=operation): - with self.assertWarns(CacheKeyWarning) as cm: - getattr(cache, operation)(*args) - self.assertEqual(str(cm.warning), expected_warning) - finally: - cache.key_func = old_func - - def test_invalid_key_characters(self): - # memcached doesn't allow whitespace or control characters in keys. - key = "key with spaces and 清" - self._perform_invalid_key_test(key, KEY_ERRORS_WITH_MEMCACHED_MSG % key) - - def test_invalid_key_length(self): - # memcached limits key length to 250. - key = ("a" * 250) + "清" - expected_warning = ( - "Cache key will cause errors if used with memcached: " f"'{key}' (longer than 250)" - ) - self._perform_invalid_key_test(key, expected_warning) - - def test_invalid_with_version_key_length(self): - # Custom make_key() that adds a version to the key and exceeds the - # limit. 
- def key_func(key, *args): # noqa: ARG001 - return key + ":1" - - key = "a" * 249 - expected_warning = ( - "Cache key will cause errors if used with memcached: " - f"'{key_func(key)}' (longer than 250)" - ) - self._perform_invalid_key_test(key, expected_warning, key_func=key_func) - - def test_cache_versioning_get_set(self): - # set, using default version = 1 - cache.set("answer1", 42) - self.assertEqual(cache.get("answer1"), 42) - self.assertEqual(cache.get("answer1", version=1), 42) - self.assertIsNone(cache.get("answer1", version=2)) - - self.assertIsNone(caches["v2"].get("answer1")) - self.assertEqual(caches["v2"].get("answer1", version=1), 42) - self.assertIsNone(caches["v2"].get("answer1", version=2)) - - # set, default version = 1, but manually override version = 2 - cache.set("answer2", 42, version=2) - self.assertIsNone(cache.get("answer2")) - self.assertIsNone(cache.get("answer2", version=1)) - self.assertEqual(cache.get("answer2", version=2), 42) - - self.assertEqual(caches["v2"].get("answer2"), 42) - self.assertIsNone(caches["v2"].get("answer2", version=1)) - self.assertEqual(caches["v2"].get("answer2", version=2), 42) - - # v2 set, using default version = 2 - caches["v2"].set("answer3", 42) - self.assertIsNone(cache.get("answer3")) - self.assertIsNone(cache.get("answer3", version=1)) - self.assertEqual(cache.get("answer3", version=2), 42) - - self.assertEqual(caches["v2"].get("answer3"), 42) - self.assertIsNone(caches["v2"].get("answer3", version=1)) - self.assertEqual(caches["v2"].get("answer3", version=2), 42) - - # v2 set, default version = 2, but manually override version = 1 - caches["v2"].set("answer4", 42, version=1) - self.assertEqual(cache.get("answer4"), 42) - self.assertEqual(cache.get("answer4", version=1), 42) - self.assertIsNone(cache.get("answer4", version=2)) - - self.assertIsNone(caches["v2"].get("answer4")) - self.assertEqual(caches["v2"].get("answer4", version=1), 42) - self.assertIsNone(caches["v2"].get("answer4", version=2)) - - def test_cache_versioning_add(self): - # add, default version = 1, but manually override version = 2 - self.assertIs(cache.add("answer1", 42, version=2), True) - self.assertIsNone(cache.get("answer1", version=1)) - self.assertEqual(cache.get("answer1", version=2), 42) - - self.assertIs(cache.add("answer1", 37, version=2), False) - self.assertIsNone(cache.get("answer1", version=1)) - self.assertEqual(cache.get("answer1", version=2), 42) - - self.assertIs(cache.add("answer1", 37, version=1), True) - self.assertEqual(cache.get("answer1", version=1), 37) - self.assertEqual(cache.get("answer1", version=2), 42) - - # v2 add, using default version = 2 - self.assertIs(caches["v2"].add("answer2", 42), True) - self.assertIsNone(cache.get("answer2", version=1)) - self.assertEqual(cache.get("answer2", version=2), 42) - - self.assertIs(caches["v2"].add("answer2", 37), False) - self.assertIsNone(cache.get("answer2", version=1)) - self.assertEqual(cache.get("answer2", version=2), 42) - - self.assertIs(caches["v2"].add("answer2", 37, version=1), True) - self.assertEqual(cache.get("answer2", version=1), 37) - self.assertEqual(cache.get("answer2", version=2), 42) - - # v2 add, default version = 2, but manually override version = 1 - self.assertIs(caches["v2"].add("answer3", 42, version=1), True) - self.assertEqual(cache.get("answer3", version=1), 42) - self.assertIsNone(cache.get("answer3", version=2)) - - self.assertIs(caches["v2"].add("answer3", 37, version=1), False) - self.assertEqual(cache.get("answer3", version=1), 42) - 
self.assertIsNone(cache.get("answer3", version=2)) - - self.assertIs(caches["v2"].add("answer3", 37), True) - self.assertEqual(cache.get("answer3", version=1), 42) - self.assertEqual(cache.get("answer3", version=2), 37) - - def test_cache_versioning_has_key(self): - cache.set("answer1", 42) - - # has_key - self.assertIs(cache.has_key("answer1"), True) - self.assertIs(cache.has_key("answer1", version=1), True) - self.assertIs(cache.has_key("answer1", version=2), False) - - self.assertIs(caches["v2"].has_key("answer1"), False) - self.assertIs(caches["v2"].has_key("answer1", version=1), True) - self.assertIs(caches["v2"].has_key("answer1", version=2), False) - - def test_cache_versioning_delete(self): - cache.set("answer1", 37, version=1) - cache.set("answer1", 42, version=2) - self.assertIs(cache.delete("answer1"), True) - self.assertIsNone(cache.get("answer1", version=1)) - self.assertEqual(cache.get("answer1", version=2), 42) - - cache.set("answer2", 37, version=1) - cache.set("answer2", 42, version=2) - self.assertIs(cache.delete("answer2", version=2), True) - self.assertEqual(cache.get("answer2", version=1), 37) - self.assertIsNone(cache.get("answer2", version=2)) - - cache.set("answer3", 37, version=1) - cache.set("answer3", 42, version=2) - self.assertIs(caches["v2"].delete("answer3"), True) - self.assertEqual(cache.get("answer3", version=1), 37) - self.assertIsNone(cache.get("answer3", version=2)) - - cache.set("answer4", 37, version=1) - cache.set("answer4", 42, version=2) - self.assertIs(caches["v2"].delete("answer4", version=1), True) - self.assertIsNone(cache.get("answer4", version=1)) - self.assertEqual(cache.get("answer4", version=2), 42) - - def test_cache_versioning_incr_decr(self): - cache.set("answer1", 37, version=1) - cache.set("answer1", 42, version=2) - self.assertEqual(cache.incr("answer1"), 38) - self.assertEqual(cache.get("answer1", version=1), 38) - self.assertEqual(cache.get("answer1", version=2), 42) - self.assertEqual(cache.decr("answer1"), 37) - self.assertEqual(cache.get("answer1", version=1), 37) - self.assertEqual(cache.get("answer1", version=2), 42) - - cache.set("answer2", 37, version=1) - cache.set("answer2", 42, version=2) - self.assertEqual(cache.incr("answer2", version=2), 43) - self.assertEqual(cache.get("answer2", version=1), 37) - self.assertEqual(cache.get("answer2", version=2), 43) - self.assertEqual(cache.decr("answer2", version=2), 42) - self.assertEqual(cache.get("answer2", version=1), 37) - self.assertEqual(cache.get("answer2", version=2), 42) - - cache.set("answer3", 37, version=1) - cache.set("answer3", 42, version=2) - self.assertEqual(caches["v2"].incr("answer3"), 43) - self.assertEqual(cache.get("answer3", version=1), 37) - self.assertEqual(cache.get("answer3", version=2), 43) - self.assertEqual(caches["v2"].decr("answer3"), 42) - self.assertEqual(cache.get("answer3", version=1), 37) - self.assertEqual(cache.get("answer3", version=2), 42) - - cache.set("answer4", 37, version=1) - cache.set("answer4", 42, version=2) - self.assertEqual(caches["v2"].incr("answer4", version=1), 38) - self.assertEqual(cache.get("answer4", version=1), 38) - self.assertEqual(cache.get("answer4", version=2), 42) - self.assertEqual(caches["v2"].decr("answer4", version=1), 37) - self.assertEqual(cache.get("answer4", version=1), 37) - self.assertEqual(cache.get("answer4", version=2), 42) - - def test_cache_versioning_get_set_many(self): - # set, using default version = 1 - cache.set_many({"ford1": 37, "arthur1": 42}) - self.assertEqual(cache.get_many(["ford1", 
"arthur1"]), {"ford1": 37, "arthur1": 42}) - self.assertEqual( - cache.get_many(["ford1", "arthur1"], version=1), - {"ford1": 37, "arthur1": 42}, - ) - self.assertEqual(cache.get_many(["ford1", "arthur1"], version=2), {}) - - self.assertEqual(caches["v2"].get_many(["ford1", "arthur1"]), {}) - self.assertEqual( - caches["v2"].get_many(["ford1", "arthur1"], version=1), - {"ford1": 37, "arthur1": 42}, - ) - self.assertEqual(caches["v2"].get_many(["ford1", "arthur1"], version=2), {}) - - # set, default version = 1, but manually override version = 2 - cache.set_many({"ford2": 37, "arthur2": 42}, version=2) - self.assertEqual(cache.get_many(["ford2", "arthur2"]), {}) - self.assertEqual(cache.get_many(["ford2", "arthur2"], version=1), {}) - self.assertEqual( - cache.get_many(["ford2", "arthur2"], version=2), - {"ford2": 37, "arthur2": 42}, - ) - - self.assertEqual(caches["v2"].get_many(["ford2", "arthur2"]), {"ford2": 37, "arthur2": 42}) - self.assertEqual(caches["v2"].get_many(["ford2", "arthur2"], version=1), {}) - self.assertEqual( - caches["v2"].get_many(["ford2", "arthur2"], version=2), - {"ford2": 37, "arthur2": 42}, - ) - - # v2 set, using default version = 2 - caches["v2"].set_many({"ford3": 37, "arthur3": 42}) - self.assertEqual(cache.get_many(["ford3", "arthur3"]), {}) - self.assertEqual(cache.get_many(["ford3", "arthur3"], version=1), {}) - self.assertEqual( - cache.get_many(["ford3", "arthur3"], version=2), - {"ford3": 37, "arthur3": 42}, - ) - - self.assertEqual(caches["v2"].get_many(["ford3", "arthur3"]), {"ford3": 37, "arthur3": 42}) - self.assertEqual(caches["v2"].get_many(["ford3", "arthur3"], version=1), {}) - self.assertEqual( - caches["v2"].get_many(["ford3", "arthur3"], version=2), - {"ford3": 37, "arthur3": 42}, - ) - - # v2 set, default version = 2, but manually override version = 1 - caches["v2"].set_many({"ford4": 37, "arthur4": 42}, version=1) - self.assertEqual(cache.get_many(["ford4", "arthur4"]), {"ford4": 37, "arthur4": 42}) - self.assertEqual( - cache.get_many(["ford4", "arthur4"], version=1), - {"ford4": 37, "arthur4": 42}, - ) - self.assertEqual(cache.get_many(["ford4", "arthur4"], version=2), {}) - - self.assertEqual(caches["v2"].get_many(["ford4", "arthur4"]), {}) - self.assertEqual( - caches["v2"].get_many(["ford4", "arthur4"], version=1), - {"ford4": 37, "arthur4": 42}, - ) - self.assertEqual(caches["v2"].get_many(["ford4", "arthur4"], version=2), {}) - - def test_incr_version(self): - cache.set("answer", 42, version=2) - self.assertIsNone(cache.get("answer")) - self.assertIsNone(cache.get("answer", version=1)) - self.assertEqual(cache.get("answer", version=2), 42) - self.assertIsNone(cache.get("answer", version=3)) - - self.assertEqual(cache.incr_version("answer", version=2), 3) - self.assertIsNone(cache.get("answer")) - self.assertIsNone(cache.get("answer", version=1)) - self.assertIsNone(cache.get("answer", version=2)) - self.assertEqual(cache.get("answer", version=3), 42) - - caches["v2"].set("answer2", 42) - self.assertEqual(caches["v2"].get("answer2"), 42) - self.assertIsNone(caches["v2"].get("answer2", version=1)) - self.assertEqual(caches["v2"].get("answer2", version=2), 42) - self.assertIsNone(caches["v2"].get("answer2", version=3)) - - self.assertEqual(caches["v2"].incr_version("answer2"), 3) - self.assertIsNone(caches["v2"].get("answer2")) - self.assertIsNone(caches["v2"].get("answer2", version=1)) - self.assertIsNone(caches["v2"].get("answer2", version=2)) - self.assertEqual(caches["v2"].get("answer2", version=3), 42) - - with 
self.assertRaises(ValueError): - cache.incr_version("does_not_exist") - - cache.set("null", None) - self.assertEqual(cache.incr_version("null"), 2) - - def test_decr_version(self): - cache.set("answer", 42, version=2) - self.assertIsNone(cache.get("answer")) - self.assertIsNone(cache.get("answer", version=1)) - self.assertEqual(cache.get("answer", version=2), 42) - - self.assertEqual(cache.decr_version("answer", version=2), 1) - self.assertEqual(cache.get("answer"), 42) - self.assertEqual(cache.get("answer", version=1), 42) - self.assertIsNone(cache.get("answer", version=2)) - - caches["v2"].set("answer2", 42) - self.assertEqual(caches["v2"].get("answer2"), 42) - self.assertIsNone(caches["v2"].get("answer2", version=1)) - self.assertEqual(caches["v2"].get("answer2", version=2), 42) - - self.assertEqual(caches["v2"].decr_version("answer2"), 1) - self.assertIsNone(caches["v2"].get("answer2")) - self.assertEqual(caches["v2"].get("answer2", version=1), 42) - self.assertIsNone(caches["v2"].get("answer2", version=2)) - - with self.assertRaises(ValueError): - cache.decr_version("does_not_exist", version=2) - - cache.set("null", None, version=2) - self.assertEqual(cache.decr_version("null", version=2), 1) - - def test_custom_key_func(self): - # Two caches with different key functions aren't visible to each other - cache.set("answer1", 42) - self.assertEqual(cache.get("answer1"), 42) - self.assertIsNone(caches["custom_key"].get("answer1")) - self.assertIsNone(caches["custom_key2"].get("answer1")) - - caches["custom_key"].set("answer2", 42) - self.assertIsNone(cache.get("answer2")) - self.assertEqual(caches["custom_key"].get("answer2"), 42) - self.assertEqual(caches["custom_key2"].get("answer2"), 42) - - @override_settings(CACHE_MIDDLEWARE_ALIAS=DEFAULT_CACHE_ALIAS) - def test_cache_write_unpicklable_object(self): - fetch_middleware = FetchFromCacheMiddleware(empty_response) - - request = self.factory.get("/cache/test") - request._cache_update_cache = True - get_cache_data = FetchFromCacheMiddleware(empty_response).process_request(request) - self.assertIsNone(get_cache_data) - - content = "Testing cookie serialization." - - def get_response(req): # noqa: ARG001 - response = HttpResponse(content) - response.set_cookie("foo", "bar") - return response - - update_middleware = UpdateCacheMiddleware(get_response) - response = update_middleware(request) - - get_cache_data = fetch_middleware.process_request(request) - self.assertIsNotNone(get_cache_data) - self.assertEqual(get_cache_data.content, content.encode()) - self.assertEqual(get_cache_data.cookies, response.cookies) - - UpdateCacheMiddleware(lambda req: get_cache_data)(request) # noqa: ARG005 - get_cache_data = fetch_middleware.process_request(request) - self.assertIsNotNone(get_cache_data) - self.assertEqual(get_cache_data.content, content.encode()) - self.assertEqual(get_cache_data.cookies, response.cookies) - - def test_add_fail_on_pickleerror(self): - # Shouldn't fail silently if trying to cache an unpicklable type. - with self.assertRaises(pickle.PickleError): - cache.add("unpicklable", Unpicklable()) - - def test_set_fail_on_pickleerror(self): - with self.assertRaises(pickle.PickleError): - cache.set("unpicklable", Unpicklable()) - - def test_get_or_set(self): - self.assertIsNone(cache.get("projector")) - self.assertEqual(cache.get_or_set("projector", 42), 42) - self.assertEqual(cache.get("projector"), 42) - self.assertIsNone(cache.get_or_set("null", None)) - # Previous get_or_set() stores None in the cache. 
- self.assertIsNone(cache.get("null", "default")) - - def test_get_or_set_callable(self): - def my_callable(): - return "value" - - self.assertEqual(cache.get_or_set("mykey", my_callable), "value") - self.assertEqual(cache.get_or_set("mykey", my_callable()), "value") - - self.assertIsNone(cache.get_or_set("null", lambda: None)) - # Previous get_or_set() stores None in the cache. - self.assertIsNone(cache.get("null", "default")) - - def test_get_or_set_version(self): - msg = "get_or_set() missing 1 required positional argument: 'default'" - self.assertEqual(cache.get_or_set("brian", 1979, version=2), 1979) - with self.assertRaisesMessage(TypeError, msg): - cache.get_or_set("brian") - with self.assertRaisesMessage(TypeError, msg): - cache.get_or_set("brian", version=1) - self.assertIsNone(cache.get("brian", version=1)) - self.assertEqual(cache.get_or_set("brian", 42, version=1), 42) - self.assertEqual(cache.get_or_set("brian", 1979, version=2), 1979) - self.assertIsNone(cache.get("brian", version=3)) - - def test_get_or_set_racing(self): - with mock.patch(f"{settings.CACHES['default']['BACKEND']}.add") as cache_add: - # Simulate cache.add() failing to add a value. In that case, the - # default value should be returned. - cache_add.return_value = False - self.assertEqual(cache.get_or_set("key", "default"), "default") - - def test_collection_has_indexes(self): - indexes = list(cache.collection_for_read.list_indexes()) - self.assertTrue( - any( - index["key"] == SON([("expires_at", 1)]) and index.get("expireAfterSeconds") == 0 - for index in indexes - ) - ) - self.assertTrue( - any( - index["key"] == SON([("key", 1)]) and index.get("unique") is True - for index in indexes - ) - ) - - def test_serializer_dumps(self): - self.assertEqual(cache.serializer.dumps(123), 123) - self.assertIsInstance(cache.serializer.dumps(True), bytes) - self.assertIsInstance(cache.serializer.dumps("abc"), bytes) - - -class DBCacheRouter: - """A router that puts the cache table on the 'other' database.""" - - def db_for_read(self, model, **hints): - if model._meta.app_label == "django_cache": - return "other" - return None - - def db_for_write(self, model, **hints): - if model._meta.app_label == "django_cache": - return "other" - return None - - def allow_migrate(self, db, app_label, **hints): - if app_label == "django_cache": - return db == "other" - return None - - -@override_settings( - CACHES={ - "default": { - "BACKEND": "django_mongodb_backend.cache.MongoDBCache", - "LOCATION": "my_cache_table", - }, - }, -) -@modify_settings( - INSTALLED_APPS={"prepend": "django_mongodb_backend"}, -) -class CreateCacheCollectionTests(TestCase): - databases = {"default", "other"} - - @override_settings(DATABASE_ROUTERS=[DBCacheRouter()]) - def test_createcachetable_observes_database_router(self): - # cache table should not be created on 'default' - with self.assertNumQueries(0, using="default"): - management.call_command("createcachecollection", database="default", verbosity=0) - # cache table should be created on 'other' - # Queries: - # 1: Create indexes - with self.assertNumQueries(1, using="other"): - management.call_command("createcachecollection", database="other", verbosity=0) From 137f2a68cfd004890b95af4867328ee02bfe38da Mon Sep 17 00:00:00 2001 From: Tim Graham Date: Mon, 28 Jul 2025 10:34:56 -0400 Subject: [PATCH 19/21] Bump version to 5.1.0b4 --- django_mongodb_backend/__init__.py | 2 +- docs/source/releases/5.1.x.rst | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git 
a/django_mongodb_backend/__init__.py b/django_mongodb_backend/__init__.py index c448b7a30..147c52b9f 100644 --- a/django_mongodb_backend/__init__.py +++ b/django_mongodb_backend/__init__.py @@ -1,4 +1,4 @@ -__version__ = "5.1.0b4.dev0" +__version__ = "5.1.0b4" # Check Django compatibility before other imports which may fail if the # wrong version of Django is installed. diff --git a/docs/source/releases/5.1.x.rst b/docs/source/releases/5.1.x.rst index 460daa4ec..e607510ec 100644 --- a/docs/source/releases/5.1.x.rst +++ b/docs/source/releases/5.1.x.rst @@ -5,7 +5,7 @@ Django MongoDB Backend 5.1.x 5.1.0 beta 4 ============ -*Unreleased* +*August 12, 2025* - Backward-incompatible: Removed support for database caching as the MongoDB security team considers the cache backend's ``pickle`` encoding of cached values a From b52ad4314f42146f1e58c8662faccd532d421d34 Mon Sep 17 00:00:00 2001 From: Jib Date: Wed, 13 Aug 2025 09:30:41 -0400 Subject: [PATCH 20/21] Update release date for 5.1.0b4 --- docs/source/releases/5.1.x.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source/releases/5.1.x.rst b/docs/source/releases/5.1.x.rst index e607510ec..e92ba52dc 100644 --- a/docs/source/releases/5.1.x.rst +++ b/docs/source/releases/5.1.x.rst @@ -5,7 +5,7 @@ Django MongoDB Backend 5.1.x 5.1.0 beta 4 ============ -*August 12, 2025* +*August 13, 2025* - Backward-incompatible: Removed support for database caching as the MongoDB security team considers the cache backend's ``pickle`` encoding of cached values a From 95b8f4d5fee5ba6edd9f65f645396182a0144bbe Mon Sep 17 00:00:00 2001 From: "mongodb-dbx-release-bot[bot]" <167856002+mongodb-dbx-release-bot[bot]@users.noreply.github.com> Date: Wed, 13 Aug 2025 14:32:54 +0000 Subject: [PATCH 21/21] BUMP 5.1.0b5.dev0 Signed-off-by: mongodb-dbx-release-bot[bot] <167856002+mongodb-dbx-release-bot[bot]@users.noreply.github.com> --- django_mongodb_backend/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/django_mongodb_backend/__init__.py b/django_mongodb_backend/__init__.py index 147c52b9f..a8901eb59 100644 --- a/django_mongodb_backend/__init__.py +++ b/django_mongodb_backend/__init__.py @@ -1,4 +1,4 @@ -__version__ = "5.1.0b4" +__version__ = "5.1.0b5.dev0" # Check Django compatibility before other imports which may fail if the # wrong version of Django is installed.
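The closing patches walk ``__version__`` from ``5.1.0b4.dev0`` to the tagged
``5.1.0b4`` and then reopen development as ``5.1.0b5.dev0``. A short sketch of
how those PEP 440 tags order, assuming the third-party ``packaging`` library is
installed (nothing below is part of the patches themselves)::

    # Sketch only: confirms the ordering of the PEP 440 tags used in these patches.
    from packaging.version import Version

    assert Version("5.1.0b4.dev0") < Version("5.1.0b4") < Version("5.1.0b5.dev0")
    # Dev builds count as pre-releases, so installers skip them unless
    # pre-release versions are requested explicitly.
    assert Version("5.1.0b5.dev0").is_prerelease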