diff --git a/.coveragerc b/.coveragerc new file mode 100644 index 0000000..28edad5 --- /dev/null +++ b/.coveragerc @@ -0,0 +1,2 @@ +[run] +source = caching diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml new file mode 100644 index 0000000..8be490c --- /dev/null +++ b/.github/workflows/ci.yaml @@ -0,0 +1,86 @@ +name: lint-test + +on: + pull_request: + branches: + - main + push: + branches: + - main + schedule: + # run once a week on early monday mornings + - cron: "22 2 * * 1" + +jobs: + pre-commit: + runs-on: ubuntu-20.04 + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-python@v2 + - uses: pre-commit/action@v2.0.0 + + test-job: + runs-on: ubuntu-20.04 + strategy: + matrix: + # tox-gh-actions will only run the tox environments which match the currently + # running python-version. See [gh-actions] in tox.ini for the mapping + python-version: [3.6, 3.7, 3.8, 3.9, "3.10"] + # Service containers to run with `test-job` + services: + memcached: + image: memcached + ports: + - 11211:11211 + redis: + image: redis + options: >- + --health-cmd "redis-cli ping" + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + - 6379:6379 + postgres-default: + image: postgres + env: + POSTGRES_USER: default + POSTGRES_PASSWORD: postgres + options: >- + --health-cmd pg_isready + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + # use port 5432 for default DB + - 5432:5432 + postgres-primary2: + image: postgres + env: + POSTGRES_USER: primary2 + POSTGRES_PASSWORD: postgres + options: >- + --health-cmd pg_isready + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + # use port 5433 for primary2 DB + - 5433:5432 + steps: + - uses: actions/checkout@v2 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + cache: "pip" + cache-dependency-path: "**/dev-requirements.txt" + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install -r dev-requirements.txt + - name: Test with tox + env: + DATABASE_URL: postgres://default:postgres@localhost:5432/default + DATABASE_URL_2: postgres://primary2:postgres@localhost:5433/primary2 + run: tox diff --git a/.gitignore b/.gitignore index a2220da..a1cbc95 100644 --- a/.gitignore +++ b/.gitignore @@ -1,2 +1,9 @@ +.coverage +.tox docs/_build *.py[co] +*.egg-info +*~ +.idea +.direnv +.envrc diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..7b35366 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,31 @@ +# See https://pre-commit.com for more information +# See https://pre-commit.com/hooks.html for more hooks +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v2.4.0 + hooks: + - id: check-added-large-files + - id: check-merge-conflict + - id: check-yaml + - id: debug-statements + - id: detect-private-key + - id: end-of-file-fixer + - id: trailing-whitespace + - repo: https://github.com/PyCQA/flake8 + rev: 4.0.1 + hooks: + - id: flake8 + - repo: https://github.com/pycqa/isort + rev: 5.11.5 + hooks: + - id: isort + args: ["--profile", "black", "--filter-files"] + - repo: https://github.com/psf/black + rev: 22.6.0 + hooks: + - id: black + language_version: python3 + - repo: https://github.com/pre-commit/mirrors-prettier + rev: "v2.6.0" + hooks: + - id: prettier diff --git a/README.rst b/README.rst index 48c8463..4ed2704 100644 --- a/README.rst +++ b/README.rst @@ -5,36 +5,36 @@ Cache Machine Cache 
Machine provides automatic caching and invalidation for Django models through the ORM. -For full docs, see http://jbalogh.me/projects/cache-machine. +For full docs, see https://cache-machine.readthedocs.org/en/latest/. + +.. image:: https://github.com/django-cache-machine/django-cache-machine/actions/workflows/ci.yaml/badge.svg + :target: https://github.com/django-cache-machine/django-cache-machine/actions/workflows/ci.yaml Requirements ------------ -Cache Machine requires Django 1.2 (currently on trunk). It was written and -tested on Python 2.6. +Cache Machine currently works with: + +* Django 2.2, 3.0, 3.1, 3.2, and 4.0 +* Python 3.6, 3.7, 3.8, 3.9, and 3.10 +The last version to support Python 2.7 and Django 1.11 is ``django-cache-machine==1.1.0``. Installation ------------ - Get it from `pypi `_:: pip install django-cache-machine -or `github `_:: - - pip install -e git://github.com/jbalogh/django-cache-machine.git#egg=django-cache-machine - Running Tests ------------- +Get it from `github `_:: -Get it from `github `_:: - - git clone git://github.com/jbalogh/django-cache-machine.git + git clone git://github.com/django-cache-machine/django-cache-machine.git cd django-cache-machine - pip install -r requirements.txt - fab test + pip install -r dev-requirements.txt + python run_tests.py diff --git a/caching/__init__.py b/caching/__init__.py index b06d400..ceb5c7f 100644 --- a/caching/__init__.py +++ b/caching/__init__.py @@ -1,2 +1,4 @@ -VERSION = (0, 4) -__version__ = '.'.join(map(str, VERSION)) +from __future__ import unicode_literals + +VERSION = ("1", "2", "0") +__version__ = ".".join(VERSION) diff --git a/caching/backends/locmem.py b/caching/backends/locmem.py deleted file mode 100644 index 48cbfd2..0000000 --- a/caching/backends/locmem.py +++ /dev/null @@ -1,31 +0,0 @@ -from django.core.cache.backends import locmem - - -# Add infinite timeout support to the locmem backend. Useful for testing. -class CacheClass(locmem.CacheClass): - - def add(self, key, value, timeout=None): - if timeout == 0: - timeout = Infinity - return super(CacheClass, self).add(key, value, timeout) - - def set(self, key, value, timeout=None): - if timeout == 0: - timeout = Infinity - return super(CacheClass, self).set(key, value, timeout) - - -class _Infinity(object): - """Always compares greater than numbers.""" - - def __radd__(self, _): - return self - - def __cmp__(self, o): - return 0 if self is o else 1 - - def __repr__(self): - return 'Infinity' - -Infinity = _Infinity() -del _Infinity diff --git a/caching/backends/memcached.py b/caching/backends/memcached.py deleted file mode 100644 index 0ef77a4..0000000 --- a/caching/backends/memcached.py +++ /dev/null @@ -1,16 +0,0 @@ -from django.core.cache.backends import memcached -from django.utils.encoding import smart_str - - -# Add infinite timeout support to the memcached backend. 
-class CacheClass(memcached.CacheClass): - - def add(self, key, value, timeout=None): - if timeout is None: - timeout = self.default_timeout - return self._cache.add(smart_str(key), value, timeout) - - def set(self, key, value, timeout=None): - if timeout is None: - timeout = self.default_timeout - return self._cache.set(smart_str(key), value, timeout) diff --git a/caching/base.py b/caching/base.py index e76fb7c..4f1c82d 100644 --- a/caching/base.py +++ b/caching/base.py @@ -1,42 +1,28 @@ import functools import logging -from django.conf import settings -from django.core.cache import cache, parse_backend_uri +import django +from django.core.cache.backends.base import DEFAULT_TIMEOUT +from django.core.exceptions import EmptyResultSet from django.db import models from django.db.models import signals -from django.db.models.sql import query +from django.db.models.query import ModelIterable from django.utils import encoding -from .invalidation import invalidator, flush_key, make_key, byid +from caching import config +from caching.invalidation import byid, cache, flush_key, invalidator, make_key - -class NullHandler(logging.Handler): - - def emit(self, record): - pass - - -log = logging.getLogger('caching') -log.setLevel(logging.INFO) -log.addHandler(NullHandler()) - -FOREVER = 0 -NO_CACHE = -1 -CACHE_PREFIX = getattr(settings, 'CACHE_PREFIX', '') -FETCH_BY_ID = getattr(settings, 'FETCH_BY_ID', False) - -scheme, _, _ = parse_backend_uri(settings.CACHE_BACKEND) -cache.scheme = scheme +log = logging.getLogger("caching") class CachingManager(models.Manager): - # Tell Django to use this manager when resolving foreign keys. + # This option removed in Django 2.0 + # Tell Django to use this manager when resolving foreign keys. (Django < 2.0) use_for_related_fields = True - def get_query_set(self): - return CachingQuerySet(self.model) + def get_queryset(self): + return CachingQuerySet(self.model, using=self._db) def contribute_to_class(self, cls, name): signals.post_save.connect(self.post_save, sender=cls) @@ -44,108 +30,145 @@ def contribute_to_class(self, cls, name): return super(CachingManager, self).contribute_to_class(cls, name) def post_save(self, instance, **kwargs): - self.invalidate(instance) + self.invalidate( + instance, is_new_instance=kwargs["created"], model_cls=kwargs["sender"] + ) def post_delete(self, instance, **kwargs): self.invalidate(instance) - def invalidate(self, *objects): + def invalidate(self, *objects, **kwargs): """Invalidate all the flush lists associated with ``objects``.""" - keys = [k for o in objects for k in o._cache_keys()] - invalidator.invalidate_keys(keys) + invalidator.invalidate_objects(objects, **kwargs) def raw(self, raw_query, params=None, *args, **kwargs): - return CachingRawQuerySet(raw_query, self.model, params=params, - using=self._db, *args, **kwargs) + return CachingRawQuerySet( + raw_query, self.model, params=params, using=self._db, *args, **kwargs + ) - def cache(self, timeout=None): - return self.get_query_set().cache(timeout) + def cache(self, timeout=DEFAULT_TIMEOUT): + return self.get_queryset().cache(timeout) def no_cache(self): - return self.cache(NO_CACHE) + return self.cache(config.NO_CACHE) -class CacheMachine(object): +class CachingModelIterable(ModelIterable): """ Handles all the cache management for a QuerySet. - Takes the string representation of a query and a function that can be - called to get an iterator over some database results. 
+ Takes a queryset, and optionally takes a function that can be called to + get an iterator over some database results. The function is only needed + for RawQuerySets currently. """ - def __init__(self, query_string, iter_function, timeout=None): - self.query_string = query_string - self.iter_function = iter_function - self.timeout = timeout + def __init__(self, queryset, *args, **kwargs): + self.iter_function = kwargs.pop("iter_function", None) + self.timeout = kwargs.pop("timeout", queryset.timeout) + self.db = kwargs.pop("db", queryset.db) + super(CachingModelIterable, self).__init__(queryset, *args, **kwargs) def query_key(self): - """Generate the cache key for this query.""" - return make_key('qs:%s' % self.query_string, with_locale=False) + """ + Generate the cache key for this query. + + Database router info is included to avoid the scenario where related + cached objects from one DB (e.g. replica) are saved in another DB (e.g. + primary), throwing a Django ValueError in the process. Django prevents + cross DB model saving among related objects. + """ + query_db_string = "qs:%s::db:%s" % (self.queryset.query_key(), self.db) + return make_key(query_db_string, with_locale=False) + + def cache_objects(self, objects, query_key): + """Cache query_key => objects, then update the flush lists.""" + log.debug("query_key: %s" % query_key) + query_flush = flush_key(self.queryset.query_key()) + log.debug("query_flush: %s" % query_flush) + cache.add(query_key, objects, timeout=self.timeout) + invalidator.cache_objects(self.queryset.model, objects, query_key, query_flush) def __iter__(self): + if self.iter_function is not None: + # This a RawQuerySet. Use the function passed into + # the class constructor. + iterator = self.iter_function + else: + # Otherwise, use super().__iter__. + iterator = super(CachingModelIterable, self).__iter__ + + if self.timeout == config.NO_CACHE: + # no cache, just iterate and return the results + for obj in iterator(): + yield obj + return + + # Try to fetch from the cache. try: query_key = self.query_key() - except query.EmptyResultSet: - raise StopIteration + except EmptyResultSet: + return - # Try to fetch from the cache. cached = cache.get(query_key) if cached is not None: - log.debug('cache hit: %s' % self.query_string) + log.debug("cache hit: %s" % query_key) for obj in cached: obj.from_cache = True yield obj return - # Do the database query, cache it once we have all the objects. - iterator = self.iter_function() + # Use the special FETCH_BY_ID iterator if configured. + if config.FETCH_BY_ID and hasattr(self.queryset, "fetch_by_id"): + iterator = self.queryset.fetch_by_id + # No cached results. Do the database query, and cache it once we have + # all the objects. 
to_cache = [] - try: - while True: - obj = iterator.next() - obj.from_cache = False - to_cache.append(obj) - yield obj - except StopIteration: - if to_cache: - self.cache_objects(to_cache) - raise - - def cache_objects(self, objects): - """Cache query_key => objects, then update the flush lists.""" - query_key = self.query_key() - query_flush = flush_key(self.query_string) - cache.add(query_key, objects, timeout=self.timeout) - invalidator.cache_objects(objects, query_key, query_flush) + for obj in iterator(): + obj.from_cache = False + to_cache.append(obj) + yield obj + if to_cache or config.CACHE_EMPTY_QUERYSETS: + self.cache_objects(to_cache, query_key) class CachingQuerySet(models.query.QuerySet): + _default_timeout_pickle_key = "__DEFAULT_TIMEOUT__" + def __init__(self, *args, **kw): super(CachingQuerySet, self).__init__(*args, **kw) - self.timeout = None + self.timeout = DEFAULT_TIMEOUT + self._iterable_class = CachingModelIterable + + def __getstate__(self): + """ + Safely pickle our timeout if it's a DEFAULT_TIMEOUT. This is not needed + by cache-machine itself, but by application code that may re-cache objects + retrieved using cache-machine. + """ + state = dict() + state.update(self.__dict__) + if self.timeout == DEFAULT_TIMEOUT: + state["timeout"] = self._default_timeout_pickle_key + return state + + def __setstate__(self, state): + """Safely unpickle our timeout if it's a DEFAULT_TIMEOUT.""" + self.__dict__.update(state) + if self.timeout == self._default_timeout_pickle_key: + self.timeout = DEFAULT_TIMEOUT def flush_key(self): return flush_key(self.query_key()) def query_key(self): - sql, params = self.query.get_compiler(using=self.db).as_sql() + clone = self.query.clone() + sql, params = clone.get_compiler(using=self.db).as_sql() return sql % params def iterator(self): - iterator = super(CachingQuerySet, self).iterator - if self.timeout == NO_CACHE: - return iter(iterator()) - else: - try: - # Work-around for Django #12717. - query_string = self.query_key() - except query.EmptyResultSet: - return iterator() - if FETCH_BY_ID: - iterator = self.fetch_by_id - return iter(CacheMachine(query_string, iterator, self.timeout)) + return self._iterable_class(self) def fetch_by_id(self): """ @@ -158,14 +181,15 @@ def fetch_by_id(self): """ # Include columns from extra since they could be used in the query's # order_by. - vals = self.values_list('pk', *self.query.extra.keys()) + vals = self.values_list("pk", *list(self.query.extra.keys())) pks = [val[0] for val in vals] - keys = dict((byid(self.model._cache_key(pk)), pk) for pk in pks) - cached = dict((k, v) for k, v in cache.get_many(keys).items() - if v is not None) + keys = dict((byid(self.model._cache_key(pk, self.db)), pk) for pk in pks) + cached = dict( + (k, v) for k, v in list(cache.get_many(keys).items()) if v is not None + ) # Pick up the objects we missed. - missed = [pk for key, pk in keys.items() if key not in cached] + missed = [pk for key, pk in list(keys.items()) if key not in cached] if missed: others = self.fetch_missed(missed) # Put the fetched objects back in cache. @@ -175,7 +199,7 @@ def fetch_by_id(self): new = {} # Use pks to return the objects in the correct order. - objects = dict((o.pk, o) for o in cached.values() + new.values()) + objects = dict((o.pk, o) for o in list(cached.values()) + list(new.values())) for pk in pks: yield objects[pk] @@ -185,28 +209,30 @@ def fetch_missed(self, pks): others.query.clear_limits() # Clear out the default ordering since we order based on the query. 
others = others.order_by().filter(pk__in=pks) - if hasattr(others, 'no_cache'): + if hasattr(others, "no_cache"): others = others.no_cache() if self.query.select_related: - others.dup_select_related(self) + others.query.select_related = self.query.select_related return others def count(self): - timeout = getattr(settings, 'CACHE_COUNT_TIMEOUT', None) super_count = super(CachingQuerySet, self).count - query_string = 'count:%s' % self.query_key() - if timeout is None: + try: + query_string = "count:%s" % self.query_key() + except EmptyResultSet: + return 0 + if self.timeout == config.NO_CACHE or config.TIMEOUT == config.NO_CACHE: return super_count() else: - return cached_with(self, super_count, query_string, timeout) + return cached_with(self, super_count, query_string, config.TIMEOUT) - def cache(self, timeout=None): + def cache(self, timeout=DEFAULT_TIMEOUT): qs = self._clone() qs.timeout = timeout return qs def no_cache(self): - return self.cache(NO_CACHE) + return self.cache(config.NO_CACHE) def _clone(self, *args, **kw): qs = super(CachingQuerySet, self)._clone(*args, **kw) @@ -214,77 +240,126 @@ def _clone(self, *args, **kw): return qs -class CachingMixin: +class CachingMixin(object): """Inherit from this class to get caching and invalidation helpers.""" def flush_key(self): return flush_key(self) - @property - def cache_key(self): + def get_cache_key(self, incl_db=True): """Return a cache key based on the object's primary key.""" - return self._cache_key(self.pk) + # incl_db will be False if this key is intended for use in a flush key. + # This ensures all cached copies of an object will be invalidated + # regardless of the DB on which they're modified/deleted. + return self._cache_key(self.pk, incl_db and self._state.db or None) + + cache_key = property(get_cache_key) @classmethod - def _cache_key(cls, pk): + def model_flush_key(cls): + """ + Return a cache key for the entire model (used by invalidation). + """ + # use dummy PK and DB reference that will never resolve to an actual + # cache key for an object + return flush_key(cls._cache_key("all-pks", "all-dbs")) + + @classmethod + def _cache_key(cls, pk, db=None): """ Return a string that uniquely identifies the object. For the Addon class, with a pk of 2, we get "o:addons.addon:2". 
""" - key_parts = ('o', cls._meta, pk) - return ':'.join(map(encoding.smart_unicode, key_parts)) + if db: + key_parts = ("o", cls._meta, pk, db) + else: + key_parts = ("o", cls._meta, pk) + return ":".join(map(encoding.smart_str, key_parts)) - def _cache_keys(self): + def _cache_keys(self, incl_db=True): """Return the cache key for self plus all related foreign keys.""" - fks = dict((f, getattr(self, f.attname)) for f in self._meta.fields - if isinstance(f, models.ForeignKey)) - - keys = [fk.rel.to._cache_key(val) for fk, val in fks.items() - if val is not None and hasattr(fk.rel.to, '_cache_key')] - return (self.cache_key,) + tuple(keys) + fks = dict( + (f, getattr(self, f.attname)) + for f in self._meta.fields + if isinstance(f, models.ForeignKey) + ) + + keys = [] + for fk, val in list(fks.items()): + related_model = self._get_fk_related_model(fk) + if val is not None and hasattr(related_model, "_cache_key"): + keys.append( + related_model._cache_key(val, incl_db and self._state.db or None) + ) + + return (self.get_cache_key(incl_db=incl_db),) + tuple(keys) + + def _flush_keys(self): + """Return the flush key for self plus all related foreign keys.""" + return map(flush_key, self._cache_keys(incl_db=False)) + + def _get_fk_related_model(self, fk): + if django.VERSION[0] >= 2: + return fk.remote_field.model + else: + return fk.rel.to class CachingRawQuerySet(models.query.RawQuerySet): + def __init__(self, *args, **kw): + timeout = kw.pop("timeout", DEFAULT_TIMEOUT) + super(CachingRawQuerySet, self).__init__(*args, **kw) + self.timeout = timeout def __iter__(self): iterator = super(CachingRawQuerySet, self).__iter__ - sql = self.raw_query % tuple(self.params) - for obj in CacheMachine(sql, iterator): - yield obj - raise StopIteration + if self.timeout == config.NO_CACHE: + iterator = iterator() + while True: + try: + yield next(iterator) + except StopIteration: + return + else: + for obj in CachingModelIterable( + self, iter_function=iterator, timeout=self.timeout + ): + yield obj + + def query_key(self): + return self.raw_query % tuple(self.params or []) def _function_cache_key(key): - return make_key('f:%s' % key, with_locale=True) + return make_key("f:%s" % key, with_locale=True) -def cached(function, key_, duration=None): +def cached(function, key_, duration=DEFAULT_TIMEOUT): """Only calls the function if ``key`` is not already in the cache.""" key = _function_cache_key(key_) val = cache.get(key) if val is None: - log.debug('cache miss for %s' % key) + log.debug("cache miss for %s" % key) val = function() cache.set(key, val, duration) else: - log.debug('cache hit for %s' % key) + log.debug("cache hit for %s" % key) return val -def cached_with(obj, f, f_key, timeout=None): +def cached_with(obj, f, f_key, timeout=DEFAULT_TIMEOUT): """Helper for caching a function call within an object's flush list.""" + try: - obj_key = (obj.query_key() if hasattr(obj, 'query_key') - else obj.cache_key) - except AttributeError: - log.warning(u'%r cannot be cached.' % obj) + obj_key = obj.query_key() if hasattr(obj, "query_key") else obj.cache_key + except (AttributeError, EmptyResultSet): + log.warning("%r cannot be cached." % encoding.smart_str(obj)) return f() - key = '%s:%s' % tuple(map(encoding.smart_str, (f_key, obj_key))) + key = "%s:%s" % tuple(map(encoding.smart_str, (f_key, obj_key))) # Put the key generated in cached() into this object's flush list. 
- invalidator.add_to_flush_list( - {obj.flush_key(): [_function_cache_key(key)]}) + invalidator.add_to_flush_list({obj.flush_key(): [_function_cache_key(key)]}) return cached(f, key, timeout) @@ -297,6 +372,7 @@ class cached_method(object): Lifted from werkzeug. """ + def __init__(self, func): self.func = func functools.update_wrapper(self, func) @@ -320,6 +396,7 @@ class MethodWrapper(object): The first call for a set of (args, kwargs) will use an external cache. After that, an object-local dict cache will be used. """ + def __init__(self, obj, func): self.obj = obj self.func = func @@ -327,11 +404,13 @@ def __init__(self, obj, func): self.cache = {} def __call__(self, *args, **kwargs): - k = lambda o: o.cache_key if hasattr(o, 'cache_key') else o - arg_keys = map(k, args) - kwarg_keys = [(key, k(val)) for key, val in kwargs.items()] - key = 'm:%s:%s:%s:%s' % (self.obj.cache_key, self.func.__name__, - arg_keys, kwarg_keys) + def k(o): + return o.cache_key if hasattr(o, "cache_key") else o + + arg_keys = list(map(k, args)) + kwarg_keys = [(key, k(val)) for key, val in list(kwargs.items())] + key_parts = ("m", self.obj.cache_key, self.func.__name__, arg_keys, kwarg_keys) + key = ":".join(map(encoding.smart_str, key_parts)) if key not in self.cache: f = functools.partial(self.func, self.obj, *args, **kwargs) self.cache[key] = cached_with(self.obj, f, key) diff --git a/caching/config.py b/caching/config.py new file mode 100644 index 0000000..f8b015c --- /dev/null +++ b/caching/config.py @@ -0,0 +1,22 @@ +from django.conf import settings + +NO_CACHE = -1 +WHOLE_MODEL = "whole-model" + +CACHE_PREFIX = getattr(settings, "CACHE_PREFIX", "") +FETCH_BY_ID = getattr(settings, "FETCH_BY_ID", False) +FLUSH = CACHE_PREFIX + ":flush:" +CACHE_EMPTY_QUERYSETS = getattr(settings, "CACHE_EMPTY_QUERYSETS", False) +TIMEOUT = getattr(settings, "CACHE_COUNT_TIMEOUT", NO_CACHE) +CACHE_INVALIDATE_ON_CREATE = getattr(settings, "CACHE_INVALIDATE_ON_CREATE", None) +CACHE_MACHINE_NO_INVALIDATION = getattr( + settings, "CACHE_MACHINE_NO_INVALIDATION", False +) +CACHE_MACHINE_USE_REDIS = getattr(settings, "CACHE_MACHINE_USE_REDIS", False) + +_invalidate_on_create_values = (None, WHOLE_MODEL) +if CACHE_INVALIDATE_ON_CREATE not in _invalidate_on_create_values: + raise ValueError( + "CACHE_INVALIDATE_ON_CREATE must be one of: " + "%s" % _invalidate_on_create_values + ) diff --git a/caching/ext.py b/caching/ext.py index b8fe3d5..ac6bd8f 100644 --- a/caching/ext.py +++ b/caching/ext.py @@ -1,6 +1,5 @@ from django.conf import settings from django.utils import encoding - from jinja2 import nodes from jinja2.ext import Extension @@ -19,7 +18,8 @@ class FragmentCacheExtension(Extension): Derived from the jinja2 documentation example. """ - tags = set(['cache']) + + tags = set(["cache"]) def __init__(self, environment): super(FragmentCacheExtension, self).__init__(environment) @@ -33,33 +33,34 @@ def parse(self, parser): # we only listen to ``'cache'`` so this will be a name token with # `cache` as value. We get the line number so that we can give # that line number to the nodes we create by hand. - lineno = parser.stream.next().lineno + lineno = next(parser.stream).lineno # Use the filename + line number and first object for the cache key. - name = '%s+%s' % (self.name, lineno) + name = "%s+%s" % (self.name, lineno) args = [nodes.Const(name), parser.parse_expression()] # If there is a comma, the user provided a timeout. If not, use # None as second parameter. 
timeout = nodes.Const(None) extra = nodes.Const([]) - while parser.stream.skip_if('comma'): + while parser.stream.skip_if("comma"): x = parser.parse_expression() - if parser.stream.current.type == 'assign': + if parser.stream.current.type == "assign": next(parser.stream) extra = parser.parse_expression() else: timeout = x args.extend([timeout, extra]) - body = parser.parse_statements(['name:endcache'], drop_needle=True) + body = parser.parse_statements(["name:endcache"], drop_needle=True) self.process_cache_arguments(args) # now return a `CallBlock` node that calls our _cache_support # helper method on this extension. - return nodes.CallBlock(self.call_method('_cache_support', args), - [], [], body).set_lineno(lineno) + return nodes.CallBlock( + self.call_method("_cache_support", args), [], [], body + ).set_lineno(lineno) def process_cache_arguments(self, args): """Extension point for adding anything extra to the cache_support.""" @@ -67,10 +68,10 @@ def process_cache_arguments(self, args): def _cache_support(self, name, obj, timeout, extra, caller): """Cache helper callback.""" - if settings.TEMPLATE_DEBUG: + if settings.DEBUG: return caller() - extra = ':'.join(map(encoding.smart_str, extra)) - key = 'fragment:%s:%s' % (name, extra) + extra = ":".join(map(encoding.smart_str, extra)) + key = "fragment:%s:%s" % (name, extra) return caching.base.cached_with(obj, caller, key, timeout) diff --git a/caching/invalidation.py b/caching/invalidation.py index 14c6db3..4481c16 100644 --- a/caching/invalidation.py +++ b/caching/invalidation.py @@ -3,29 +3,35 @@ import hashlib import logging import socket +from urllib.parse import parse_qsl from django.conf import settings -from django.core.cache import cache, parse_backend_uri +from django.core.cache import cache as default_cache +from django.core.cache import caches +from django.core.cache.backends.base import InvalidCacheBackendError from django.utils import encoding, translation +from caching import config + try: import redis as redislib except ImportError: redislib = None +# Look for an own cache first before falling back to the default cache +try: + cache = caches["cache_machine"] +except (InvalidCacheBackendError, ValueError): + cache = default_cache -CACHE_PREFIX = getattr(settings, 'CACHE_PREFIX', '') -FETCH_BY_ID = getattr(settings, 'FETCH_BY_ID', False) -FLUSH = CACHE_PREFIX + ':flush:' - -log = logging.getLogger('caching.invalidation') +log = logging.getLogger("caching.invalidation") def make_key(k, with_locale=True): """Generate the full key for ``k``, with a prefix.""" - key = encoding.smart_str('%s:%s' % (CACHE_PREFIX, k)) + key = encoding.smart_bytes("%s:%s" % (config.CACHE_PREFIX, k)) if with_locale: - key += encoding.smart_str(translation.get_language()) + key += encoding.smart_bytes(translation.get_language()) # memcached keys must be < 250 bytes and w/o whitespace, but it's nice # to see the keys when using locmem. 
return hashlib.md5(key).hexdigest() @@ -33,13 +39,13 @@ def make_key(k, with_locale=True): def flush_key(obj): """We put flush lists in the flush: namespace.""" - key = obj if isinstance(obj, basestring) else obj.cache_key - return FLUSH + make_key(key, with_locale=False) + key = obj if isinstance(obj, str) else obj.get_cache_key(incl_db=False) + return config.FLUSH + make_key(key, with_locale=False) def byid(obj): - key = obj if isinstance(obj, basestring) else obj._cache_key(obj.pk) - return make_key('byid:' + key) + key = obj if isinstance(obj, str) else obj.cache_key + return make_key("byid:" + key) def safe_redis(return_type): @@ -48,37 +54,51 @@ def safe_redis(return_type): return_type (optionally a callable) will be returned if there is an error. """ + def decorator(f): @functools.wraps(f) def wrapper(*args, **kw): try: return f(*args, **kw) - except (socket.error, redislib.RedisError), e: - log.error('redis error: %s' % e) - log.error('%r\n%r : %r' % (f.__name__, args[1:], kw)) - if hasattr(return_type, '__call__'): + except (socket.error, redislib.RedisError) as e: + log.error("redis error: %s" % e) + # log.error('%r\n%r : %r' % (f.__name__, args[1:], kw)) + if hasattr(return_type, "__call__"): return return_type() else: return return_type + return wrapper - return decorator + return decorator class Invalidator(object): - - def invalidate_keys(self, keys): - """Invalidate all the flush lists named by the list of ``keys``.""" - if not keys: + def invalidate_objects(self, objects, is_new_instance=False, model_cls=None): + """Invalidate all the flush lists for the given ``objects``.""" + obj_keys = [k for o in objects for k in o._cache_keys()] + flush_keys = [k for o in objects for k in o._flush_keys()] + # If whole-model invalidation on create is enabled, include this model's + # key in the list to be invalidated. Note that the key itself won't + # contain anything in the cache, but its corresponding flush key will. + if ( + config.CACHE_INVALIDATE_ON_CREATE == config.WHOLE_MODEL + and is_new_instance + and model_cls + and hasattr(model_cls, "model_flush_key") + ): + flush_keys.append(model_cls.model_flush_key()) + if not obj_keys or not flush_keys: return - flush, flush_keys = self.find_flush_lists(keys) - - if flush: - cache.set_many(dict((k, None) for k in flush), 5) + obj_keys, flush_keys = self.expand_flush_lists(obj_keys, flush_keys) + if obj_keys: + log.debug("deleting object keys: %s" % obj_keys) + cache.delete_many(obj_keys) if flush_keys: + log.debug("clearing flush lists: %s" % flush_keys) self.clear_flush_lists(flush_keys) - def cache_objects(self, objects, query_key, query_flush): + def cache_objects(self, model, objects, query_key, query_flush): # Add this query to the flush list of each object. We include # query_flush so that other things can be cached against the queryset # and still participate in invalidation. @@ -86,46 +106,56 @@ def cache_objects(self, objects, query_key, query_flush): flush_lists = collections.defaultdict(set) for key in flush_keys: + log.debug("adding %s to %s" % (query_flush, key)) flush_lists[key].add(query_flush) flush_lists[query_flush].add(query_key) - + # Add this query to the flush key for the entire model, if enabled + model_flush = model.model_flush_key() + if config.CACHE_INVALIDATE_ON_CREATE == config.WHOLE_MODEL: + flush_lists[model_flush].add(query_key) # Add each object to the flush lists of its foreign keys. 
for obj in objects: obj_flush = obj.flush_key() - for key in map(flush_key, obj._cache_keys()): - if key != obj_flush: + for key in obj._flush_keys(): + if key not in (obj_flush, model_flush): + log.debug("related: adding %s to %s" % (obj_flush, key)) flush_lists[key].add(obj_flush) - if FETCH_BY_ID: + if config.FETCH_BY_ID: flush_lists[key].add(byid(obj)) self.add_to_flush_list(flush_lists) - def find_flush_lists(self, keys): + def expand_flush_lists(self, obj_keys, flush_keys): """ Recursively search for flush lists and objects to invalidate. The search starts with the lists in `keys` and expands to any flush lists found therein. Returns ({objects to flush}, {flush keys found}). """ - new_keys = keys = set(map(flush_key, keys)) - flush = set(keys) + log.debug("in expand_flush_lists") + obj_keys = set(obj_keys) + search_keys = flush_keys = set(flush_keys) # Add other flush keys from the lists, which happens when a parent # object includes a foreign key. while 1: - to_flush = self.get_flush_lists(new_keys) - flush.update(to_flush) - new_keys = set(k for k in to_flush if k.startswith(FLUSH)) - diff = new_keys.difference(keys) - if diff: - keys.update(new_keys) + new_keys = set() + for key in self.get_flush_lists(search_keys): + if key.startswith(config.FLUSH): + new_keys.add(key) + else: + obj_keys.add(key) + if new_keys: + log.debug("search for %s found keys %s" % (search_keys, new_keys)) + flush_keys.update(new_keys) + search_keys = new_keys else: - return flush, keys + return obj_keys, flush_keys def add_to_flush_list(self, mapping): """Update flush lists with the {flush_key: [query_key,...]} map.""" flush_lists = collections.defaultdict(set) - flush_lists.update(cache.get_many(mapping.keys())) - for key, list_ in mapping.items(): + flush_lists.update(cache.get_many(list(mapping.keys()))) + for key, list_ in list(mapping.items()): if flush_lists[key] is None: flush_lists[key] = set(list_) else: @@ -134,9 +164,11 @@ def add_to_flush_list(self, mapping): def get_flush_lists(self, keys): """Return a set of object keys from the lists in `keys`.""" - return set(e for flush_list in - filter(None, cache.get_many(keys).values()) - for e in flush_list) + return set( + e + for flush_list in [_f for _f in list(cache.get_many(keys).values()) if _f] + for e in flush_list + ) def clear_flush_lists(self, keys): """Remove the given keys from the database.""" @@ -144,67 +176,95 @@ def clear_flush_lists(self, keys): class RedisInvalidator(Invalidator): - def safe_key(self, key): - if ' ' in key or '\n' in key: + if " " in key or "\n" in key: log.warning('BAD KEY: "%s"' % key) - return '' + return "" return key @safe_redis(None) def add_to_flush_list(self, mapping): """Update flush lists with the {flush_key: [query_key,...]} map.""" pipe = redis.pipeline(transaction=False) - for key, list_ in mapping.items(): + for key, list_ in list(mapping.items()): for query_key in list_: - pipe.sadd(self.safe_key(key), query_key) + # Redis happily accepts unicode, but returns byte strings, + # so manually encode and decode the keys on the flush list here + pipe.sadd(self.safe_key(key), query_key.encode("utf-8")) pipe.execute() @safe_redis(set) def get_flush_lists(self, keys): - return redis.sunion(map(self.safe_key, keys)) + flush_list = redis.sunion(list(map(self.safe_key, keys))) + return [k.decode("utf-8") for k in flush_list] @safe_redis(None) def clear_flush_lists(self, keys): - redis.delete(*map(self.safe_key, keys)) + redis.delete(*list(map(self.safe_key, keys))) class NullInvalidator(Invalidator): - def 
add_to_flush_list(self, mapping): return +def parse_backend_uri(backend_uri): + """ + Converts the "backend_uri" into a host and any extra params that are + required for the backend. Returns a (host, params) tuple. + """ + backend_uri_sliced = backend_uri.split("://") + if len(backend_uri_sliced) > 2: + raise InvalidCacheBackendError("Backend URI can't have more than one scheme://") + elif len(backend_uri_sliced) == 2: + rest = backend_uri_sliced[1] + else: + rest = backend_uri_sliced[0] + + host = rest + qpos = rest.find("?") + if qpos != -1: + params = dict(parse_qsl(rest[qpos + 1 :])) + host = rest[:qpos] + else: + params = {} + if host.endswith("/"): + host = host[:-1] + + return host, params + + def get_redis_backend(): """Connect to redis from a string like CACHE_BACKEND.""" # From django-redis-cache. - _, server, params = parse_backend_uri(settings.REDIS_BACKEND) - db = params.pop('db', 1) + server, params = parse_backend_uri(settings.REDIS_BACKEND) + db = params.pop("db", 0) try: db = int(db) except (ValueError, TypeError): - db = 1 + db = 0 try: - socket_timeout = float(params.pop('socket_timeout')) + socket_timeout = float(params.pop("socket_timeout")) except (KeyError, ValueError): socket_timeout = None - password = params.pop('password', None) - if ':' in server: - host, port = server.split(':') + password = params.pop("password", None) + if ":" in server: + host, port = server.split(":") try: port = int(port) except (ValueError, TypeError): port = 6379 else: - host = 'localhost' + host = "localhost" port = 6379 - return redislib.Redis(host=host, port=port, db=db, password=password, - socket_timeout=socket_timeout) + return redislib.Redis( + host=host, port=port, db=db, password=password, socket_timeout=socket_timeout + ) -if getattr(settings, 'CACHE_MACHINE_NO_INVALIDATION', False): +if config.CACHE_MACHINE_NO_INVALIDATION: invalidator = NullInvalidator() -elif getattr(settings, 'CACHE_MACHINE_USE_REDIS', False): +elif config.CACHE_MACHINE_USE_REDIS: redis = get_redis_backend() invalidator = RedisInvalidator() else: diff --git a/dev-requirements.txt b/dev-requirements.txt new file mode 100644 index 0000000..8954ca1 --- /dev/null +++ b/dev-requirements.txt @@ -0,0 +1,12 @@ +# These are the reqs to build docs and run tests. +sphinx +django-redis +jinja2 +redis +flake8 +coverage +psycopg2-binary +dj-database-url +python-memcached>=1.58 +tox +tox-gh-actions diff --git a/docs/conf.py b/docs/conf.py index 71f3df3..f58ba74 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,21 +1,22 @@ import os import sys -sys.path.append(os.path.abspath('..')) - import caching +sys.path.append(os.path.abspath("..")) + + # The suffix of source filenames. -source_suffix = '.rst' +source_suffix = ".rst" # The master toctree document. -master_doc = 'index' +master_doc = "index" -extensions = ['sphinx.ext.autodoc'] +extensions = ["sphinx.ext.autodoc"] # General information about the project. -project = u'Cache Machine' -copyright = u'2010, The Zamboni Collective' +project = "Cache Machine" +copyright = "2010, The Zamboni Collective" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -27,4 +28,4 @@ # List of directories, relative to source directory, that shouldn't be searched # for source files. 
-exclude_trees = ['_build'] +exclude_trees = ["_build"] diff --git a/docs/index.rst b/docs/index.rst index 97616e2..a38353a 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -6,35 +6,19 @@ Cache Machine Cache Machine provides automatic caching and invalidation for Django models through the ORM. The code is hosted on -`github `_. +`github `_. + +For an overview of new features and backwards-incompatible changes which may +affect you, please see the :ref:`release-notes`. Settings -------- -Before we start, you'll have to update your ``settings.py`` to use one of the -caching backends provided by Cache Machine. Django's built-in caching backends -don't allow infinite cache timeouts, which are critical for doing invalidation -(see below). Cache Machine extends the ``locmem`` and ``memcached`` backends -provided by Django to enable indefinite caching when a timeout of ``0`` is -passed. If you were already using one of these backends, you can probably go -on using them just as you were. If you were caching things with a timeout of -``0``, there will be problems with those entities now getting cached forever. -You shouldn't have been doing that anyways. - -For memcached:: - - CACHE_BACKEND = 'caching.backends.memcached://localhost:11211' - -For locmem (only recommended for testing):: - - CACHE_BACKEND = 'caching.backends.locmem://' - -Cache Machine will not work properly with the file or database cache backends. - -If you want to set a prefix for all keys in Cache Machine, define -``CACHE_PREFIX`` in settings.py:: +Older versions of Cache Machine required you to use customized cache backends. These are no longer +needed and they have been removed from Cache Machine. Use the standard Django cache backends. - CACHE_PREFIX = 'weee:' +COUNT queries +^^^^^^^^^^^^^ Calls to ``QuerySet.count()`` can be cached, but they cannot be reliably invalidated. Cache Machine would have to do a full select to figure out the @@ -44,6 +28,31 @@ short enough that stale counts won't be a big deal. :: CACHE_COUNT_TIMEOUT = 60 # seconds, not too long. +By default, calls to ``QuerySet.count()`` are not cached. They are only cached +if ``CACHE_COUNT_TIMEOUT`` is set to a value other than +``caching.base.NO_CACHE``. + +Empty querysets +^^^^^^^^^^^^^^^ + +By default cache machine will not cache empty querysets. To cache them:: + + CACHE_EMPTY_QUERYSETS = True + +.. _object-creation: + +Object creation +^^^^^^^^^^^^^^^ + +By default Cache Machine does not invalidate queries when a new object is +created, because it can be expensive to maintain a flush list of all the +queries associated with a given table and cause significant disruption on +high-volume sites when *all* the queries for a particular model are +invalidated at once. If these are not issues for your site and immediate +inclusion of created objects in previously cached queries is desired, you +can enable this feature as follows:: + + CACHE_INVALIDATE_ON_CREATE = 'whole-model' Cache Manager ------------- @@ -53,19 +62,23 @@ that class and inherit from the :class:`~caching.base.CachingMixin`. If you want related lookups (foreign keys) to hit the cache, ``CachingManager`` must be the default manager. If you have multiple managers that should be cached, return a :class:`~caching.base.CachingQuerySet` from the other manager's -``get_query_set`` method instead of subclassing ``CachingManager``, since that +``get_queryset`` method instead of subclassing ``CachingManager``, since that would hook up the post_save and post_delete signals multiple times. 
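A minimal sketch of that multiple-manager pattern (illustrative only, not part of this patch; ``PublicManager``, ``Article``, and the ``public`` field are hypothetical names, while ``CachingManager``, ``CachingMixin``, and ``CachingQuerySet`` are the classes provided in ``caching/base.py``)::

    from django.db import models

    from caching.base import CachingManager, CachingMixin, CachingQuerySet


    class PublicManager(models.Manager):
        # Return a CachingQuerySet directly instead of subclassing
        # CachingManager, so post_save/post_delete are only connected once
        # (by the default CachingManager below).
        def get_queryset(self):
            return CachingQuerySet(self.model, using=self._db).filter(public=True)


    class Article(CachingMixin, models.Model):
        public = models.BooleanField(default=False)

        objects = CachingManager()        # default manager: hooks up invalidation
        public_objects = PublicManager()  # cached queries, no duplicate signals

        class Meta:
            base_manager_name = 'objects'  # needed on Django 2.0+, see below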
Here's what a minimal cached model looks like:: from django.db import models - import caching.base + from caching.base import CachingManager, CachingMixin - class Zomg(caching.base.CachingMixin, models.Model): + class Zomg(CachingMixin, models.Model): val = models.IntegerField() - objects = caching.base.CachingManager() + objects = CachingManager() + + # if you use Django 2.0 or later, you must set base_manager_name + class Meta: + base_manager_name = 'objects' # Attribute name of CachingManager(), above Whenever you run a query, ``CachingQuerySet`` will try to find that query in the cache. Queries are keyed by ``{prefix}:{sql}``. If it's there, we return @@ -92,7 +105,7 @@ cleared. To avoid stale foreign key relations, any cached objects will be flushed when the object their foreign key points to is invalidated. During cache invalidation, we explicitly set a None value instead of just -deleting so we don't have any race condtions where: +deleting so we don't have any race conditions where: * Thread 1 -> Cache miss, get object from DB * Thread 2 -> Object saved, deleted from cache @@ -104,6 +117,23 @@ The foundations of this module were derived from `Mike Malone's`_ .. _`Mike Malone's`: http://immike.net/ .. _django-caching: http://github.com/mmalone/django-caching/ +Changing the timeout of a CachingQuerySet instance +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +By default, the timeout for a ``CachingQuerySet`` instance will be the timeout +of the underlying cache being used by Cache Machine. To change the timeout of +a ``CachingQuerySet`` instance, you can assign a different value to the +``timeout`` attribute which represents the number of seconds to cache for + +For example:: + + def get_objects(name): + qs = CachedClass.objects.filter(name=name) + qs.timeout = 5 # seconds + return qs + +To disable caching for a particular ``CachingQuerySet`` instance, set the +``timeout`` attribute to ``caching.base.NO_CACHE``. Manual Caching -------------- @@ -153,10 +183,30 @@ If someone wants to write a template tag for Django templates, I'd love to add it. -Classes that May Interest You +Redis Support +------------- + +Cache Machine support storing flush lists in Redis rather than memcached, which +is more efficient because Redis can manipulate the lists on the server side +rather than having to transfer the entire list back and forth for each +modification. + +To enable Redis support for Cache Machine, add the following to your settings +file, replacing ``localhost`` with the hostname of your Redis server:: + + CACHE_MACHINE_USE_REDIS = True + REDIS_BACKEND = 'redis://localhost:6379' + +.. note:: + When using Redis, memcached is still used for caching model objects, i.e., + only the flush lists are stored in Redis. You still need to configure + ``CACHES`` the way you would normally for Cache Machine. + + +Classes That May Interest You ----------------------------- -.. autoclass:: caching.base.CacheMachine +.. autoclass:: caching.base.CachingModelIterable .. autoclass:: caching.base.CachingManager :members: diff --git a/docs/releases.rst b/docs/releases.rst new file mode 100644 index 0000000..f5f8ad8 --- /dev/null +++ b/docs/releases.rst @@ -0,0 +1,79 @@ +.. _release-notes: + +Release Notes +================== + +v1.2.0 (2022-07-06) +------------------- + +- Drop official support for unsupported Django versions (1.11, 2.0, 2.1) +- Add support for Django 3.0, 3.1, 3.2, and 4.0 (thanks, @johanneswilm and @Afani97!) 
+- Add support for Python 3.8, 3.9, and 3.10 +- Switch to GitHub Actions + +v1.1.0 (2019-02-17) +------------------- + +- Drop official support for unsupported Django versions (1.8, 1.9, and 1.10) +- Add support for Django 2.0, 2.1, and 2.2 (thanks, @JungleKim and @wetneb!) +- Add support for Python 3.7 +- Fix Travis + +v1.0.0 (2017-10-13) +------------------- + +- Update Travis and Tox configurations +- Drop support for Python < 2.7 +- Add support for Python 3.5 and 3.6 +- Drop support for Django < 1.8 +- Add support for Django 1.9, 1.10, and 1.11 +- Removed all custom cache backends. +- Flake8 fixes + +Backwards Incompatible Changes +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +- Cache Machine previously included custom backends for LocMem, Memcached and PyLibMemcached. These + were necessary because the core backends in old versions of Django did not support infinte + timeouts. They now do, so Cache Machine's custom backends are no longer necessary. They have been + removed, so you should revert to using the core Django backends. + +v0.9.1 (2015-10-22) +------------------- + +- Fix bug that prevented objects retrieved via cache machine from being + re-cached by application code (see PR #103) +- Fix bug that prevented caching objects forever when using Django <= 1.5 + (see PR #104) +- Fix regression (introduced in 0.8) that broke invalidation when an object + was cached via a replica database and later modified or deleted via the + primary database, when using primary/replica replication (see PR #105). Note + this change may cause unexpected invalidation when sharding across DBs + that share both a schema and primary key values or other attributes. + +v0.9 (2015-07-29) +----------------- + +- Support for Python 3 +- A new setting, ``CACHE_INVALIDATE_ON_CREATE``, which facilitates invalidation + when a new model object is created. For more information, see + :ref:`object-creation`. + +v0.8.1 (2015-07-03) +----------------------- + +This release is primarily aimed at adding support for more recent versions of +Django and catching up on recent contributions. + +- Support for Django 1.7 and Django 1.8 +- Fix bug in parsing of ``REDIS_BACKEND`` URI +- Miscellaneous bug fixes and documentation corrections + +Backwards Incompatible Changes +________________________________ + +- Dropped support for the old style ``caching.backends.memcached.CacheClass`` and + ``caching.backends.locmem.CacheClass`` classes. Support for this naming + has been deprecated since Django 1.3. You will need to switch your project + to use ``MemcachedCache``, ``PyLibMCCache``, or ``LocMemCache`` in place of + ``CacheClass``. 
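To make the backend migration described above concrete, here is a hedged settings sketch; it mirrors the ``examples/cache_machine`` settings added elsewhere in this diff, and the memcached location is an assumption::

    # settings.py -- Cache Machine now relies on the standard Django backends.
    # If a "cache_machine" alias is defined, caching/invalidation.py uses it
    # for cached querysets and flush lists instead of "default".
    CACHES = {
        "default": {
            "BACKEND": "django.core.cache.backends.memcached.MemcachedCache",
            "LOCATION": "localhost:11211",
        },
    }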
diff --git a/examples/cache-machine/__init__.py b/examples/cache-machine/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/examples/cache-machine/locmem_settings.py b/examples/cache-machine/locmem_settings.py deleted file mode 100644 index de85736..0000000 --- a/examples/cache-machine/locmem_settings.py +++ /dev/null @@ -1,3 +0,0 @@ -from settings import * - -CACHE_BACKEND = 'caching.backends.locmem://' diff --git a/examples/cache-machine/memcache_byid.py b/examples/cache-machine/memcache_byid.py deleted file mode 100644 index 85d711d..0000000 --- a/examples/cache-machine/memcache_byid.py +++ /dev/null @@ -1,3 +0,0 @@ -from settings import * - -FETCH_BY_ID = True diff --git a/examples/cache-machine/redis_byid.py b/examples/cache-machine/redis_byid.py deleted file mode 100644 index 0504351..0000000 --- a/examples/cache-machine/redis_byid.py +++ /dev/null @@ -1,3 +0,0 @@ -from redis_settings import * - -FETCH_BY_ID = True diff --git a/examples/cache-machine/redis_settings.py b/examples/cache-machine/redis_settings.py deleted file mode 100644 index a11fa00..0000000 --- a/examples/cache-machine/redis_settings.py +++ /dev/null @@ -1,4 +0,0 @@ -from settings import * - -CACHE_MACHINE_USE_REDIS = True -REDIS_BACKEND = 'redis://' diff --git a/examples/cache-machine/settings.py b/examples/cache-machine/settings.py deleted file mode 100644 index 56192b8..0000000 --- a/examples/cache-machine/settings.py +++ /dev/null @@ -1,14 +0,0 @@ -CACHE_BACKEND = 'caching.backends.memcached://localhost:11211' - -TEST_RUNNER = 'django_nose.runner.NoseTestSuiteRunner' - -DATABASES = { - 'default': { - 'NAME': 'test.db', - 'ENGINE': 'django.db.backends.sqlite3', - } -} - -INSTALLED_APPS = ( - 'django_nose', -) diff --git a/caching/backends/__init__.py b/examples/cache_machine/__init__.py similarity index 100% rename from caching/backends/__init__.py rename to examples/cache_machine/__init__.py diff --git a/examples/cache_machine/custom_backend.py b/examples/cache_machine/custom_backend.py new file mode 100644 index 0000000..6941bfa --- /dev/null +++ b/examples/cache_machine/custom_backend.py @@ -0,0 +1,12 @@ +# flake8: noqa +from .settings import * + +CACHES = { + "default": { + "BACKEND": "django.core.cache.backends.locmem.LocMemCache", + }, + "cache_machine": { + "BACKEND": "django.core.cache.backends.memcached.MemcachedCache", + "LOCATION": "localhost:11211", + }, +} diff --git a/examples/cache_machine/django_redis_settings.py b/examples/cache_machine/django_redis_settings.py new file mode 100644 index 0000000..941e7ea --- /dev/null +++ b/examples/cache_machine/django_redis_settings.py @@ -0,0 +1,9 @@ +# flake8: noqa +from .redis_settings import * + +CACHES = { + "default": { + "BACKEND": "django_redis.cache.RedisCache", + "LOCATION": "redis://127.0.0.1:6379/0", + }, +} diff --git a/examples/cache_machine/locmem_settings.py b/examples/cache_machine/locmem_settings.py new file mode 100644 index 0000000..054826c --- /dev/null +++ b/examples/cache_machine/locmem_settings.py @@ -0,0 +1,8 @@ +# flake8: noqa +from .settings import * + +CACHES = { + "default": { + "BACKEND": "django.core.cache.backends.locmem.LocMemCache", + }, +} diff --git a/examples/cache_machine/memcache_byid.py b/examples/cache_machine/memcache_byid.py new file mode 100644 index 0000000..edb30f0 --- /dev/null +++ b/examples/cache_machine/memcache_byid.py @@ -0,0 +1,4 @@ +# flake8: noqa +from .settings import * + +FETCH_BY_ID = True diff --git a/examples/cache_machine/redis_byid.py b/examples/cache_machine/redis_byid.py new 
file mode 100644 index 0000000..fa2369d --- /dev/null +++ b/examples/cache_machine/redis_byid.py @@ -0,0 +1,4 @@ +# flake8: noqa +from .redis_settings import * + +FETCH_BY_ID = True diff --git a/examples/cache_machine/redis_settings.py b/examples/cache_machine/redis_settings.py new file mode 100644 index 0000000..3263e66 --- /dev/null +++ b/examples/cache_machine/redis_settings.py @@ -0,0 +1,5 @@ +# flake8: noqa +from .settings import * + +CACHE_MACHINE_USE_REDIS = True +REDIS_BACKEND = "redis://" diff --git a/examples/cache_machine/settings.py b/examples/cache_machine/settings.py new file mode 100644 index 0000000..72d8036 --- /dev/null +++ b/examples/cache_machine/settings.py @@ -0,0 +1,40 @@ +import os + +import dj_database_url +import django + +CACHES = { + "default": { + "BACKEND": "django.core.cache.backends.memcached.MemcachedCache", + "LOCATION": "localhost:11211", + }, +} + +DATABASES = { + "default": dj_database_url.config(default="postgres:///cache_machine_devel"), + "primary2": dj_database_url.parse( + os.getenv("DATABASE_URL_2", "postgres:///cache_machine_devel2") + ), +} +for primary, replica in (("default", "replica"), ("primary2", "replica2")): + DATABASES[replica] = DATABASES[primary].copy() + DATABASES[replica]["TEST"] = {"MIRROR": primary} + +DEFAULT_AUTO_FIELD = "django.db.models.AutoField" + +INSTALLED_APPS = ("tests.testapp",) + +SECRET_KEY = "ok" + +MIDDLEWARE_CLASSES = ( + "django.contrib.sessions.middleware.SessionMiddleware", + "django.middleware.common.CommonMiddleware", + "django.middleware.csrf.CsrfViewMiddleware", + "django.contrib.auth.middleware.AuthenticationMiddleware", + "django.contrib.auth.middleware.SessionAuthenticationMiddleware", + "django.contrib.messages.middleware.MessageMiddleware", + "django.middleware.clickjacking.XFrameOptionsMiddleware", +) + +if django.VERSION[0] >= 2: + MIDDLEWARE = MIDDLEWARE_CLASSES diff --git a/fabfile.py b/fabfile.py deleted file mode 100644 index e953208..0000000 --- a/fabfile.py +++ /dev/null @@ -1,41 +0,0 @@ -""" -Creating standalone Django apps is a PITA because you're not in a project, so -you don't have a settings.py file. I can never remember to define -DJANGO_SETTINGS_MODULE, so I run these commands which get the right env -automatically. -""" -import functools -import os - -from fabric.api import local, cd, env -from fabric.contrib.project import rsync_project - -NAME = os.path.basename(os.path.dirname(__file__)) -ROOT = os.path.abspath(os.path.dirname(__file__)) - -os.environ['PYTHONPATH'] = os.pathsep.join([ROOT, - os.path.join(ROOT, 'examples')]) - -env.hosts = ['jbalogh.me'] - -local = functools.partial(local, capture=False) - - -def doc(kind='html'): - with cd('docs'): - local('make clean %s' % kind) - - -SETTINGS = ('locmem_settings', 'settings', 'memcache_byid', - 'redis_settings', 'redis_byid') - -def test(): - for settings in SETTINGS: - print settings - os.environ['DJANGO_SETTINGS_MODULE'] = 'cache-machine.%s' % settings - local('django-admin.py test') - - -def updoc(): - doc('dirhtml') - rsync_project('p/%s' % NAME, 'docs/_build/dirhtml/', delete=True) diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index 28477bb..0000000 --- a/requirements.txt +++ /dev/null @@ -1,9 +0,0 @@ -# These are the reqs to build docs and run tests. 
-sphinx -mock --e git://github.com/jbalogh/django-nose.git@6f060d49ee193a05734704820f3fea92ee1759d2#egg=django-nose --e svn+http://code.djangoproject.com/svn/django/trunk@12335#egg=Django -python-memcached --e git://github.com/jbalogh/test-utils.git#egg=test-utils -fabric -jinja2 diff --git a/run_tests.py b/run_tests.py new file mode 100644 index 0000000..1ec7ade --- /dev/null +++ b/run_tests.py @@ -0,0 +1,66 @@ +""" +Creating standalone Django apps is a PITA because you're not in a project, so +you don't have a settings.py file. I can never remember to define +DJANGO_SETTINGS_MODULE, so I run these commands which get the right env +automatically. +""" +import argparse +import os +import sys +from subprocess import call, check_output + +NAME = os.path.basename(os.path.dirname(__file__)) +ROOT = os.path.abspath(os.path.dirname(__file__)) + +os.environ["PYTHONPATH"] = os.pathsep.join([ROOT, os.path.join(ROOT, "examples")]) + +SETTINGS = ( + "locmem_settings", + "settings", + "memcache_byid", + "custom_backend", + "redis_settings", + "redis_byid", + "django_redis_settings", +) + + +def main(): + parser = argparse.ArgumentParser( + description="Run the tests for django-cache-machine. " + "If no options are specified, tests will be run with " + "all settings files and without coverage.py." + ) + parser.add_argument( + "--with-coverage", + action="store_true", + help="Run tests with coverage.py and display coverage report", + ) + parser.add_argument( + "--settings", + choices=SETTINGS, + help="Run tests only for the specified settings file", + ) + args = parser.parse_args() + settings = args.settings and [args.settings] or SETTINGS + results = [] + django_admin = check_output(["which", "django-admin"]).strip() + for i, settings_module in enumerate(settings): + print("Running tests for: %s" % settings_module) + os.environ["DJANGO_SETTINGS_MODULE"] = "cache_machine.%s" % settings_module + # append to the existing coverage data for all but the first run + if args.with_coverage and i > 0: + test_cmd = ["coverage", "run", "--append"] + elif args.with_coverage: + test_cmd = ["coverage", "run"] + else: + test_cmd = [] + test_cmd += [django_admin, "test", "--keepdb"] + results.append(call(test_cmd)) + if args.with_coverage: + results.append(call(["coverage", "report", "-m", "--fail-under", "70"])) + sys.exit(any(results) and 1 or 0) + + +if __name__ == "__main__": + main() diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 0000000..122447f --- /dev/null +++ b/setup.cfg @@ -0,0 +1,10 @@ +[flake8] +max-line-length = 88 +extend-ignore = E203 +exclude= + .tox + .git + .direnv + +[isort] +profile = black diff --git a/setup.py b/setup.py index dae2dd0..d7a62d7 100644 --- a/setup.py +++ b/setup.py @@ -2,30 +2,35 @@ import caching - setup( - name='django-cache-machine', + name="django-cache-machine", version=caching.__version__, - description='Automatic caching and invalidation for Django models ' - 'through the ORM.', - long_description=open('README.rst').read(), - author='Jeff Balogh', - author_email='jbalogh@mozilla.com', - url='http://github.com/jbalogh/django-cache-machine', - license='BSD', - packages=['caching', 'caching.backends'], + description="Automatic caching and invalidation for Django models " + "through the ORM.", + long_description=open("README.rst").read(), + author="Jeff Balogh", + author_email="jbalogh@mozilla.com", + url="http://github.com/django-cache-machine/django-cache-machine", + license="BSD", + packages=["caching"], include_package_data=True, zip_safe=False, classifiers=[ 
- 'Development Status :: 4 - Beta', - 'Environment :: Web Environment', + "Development Status :: 4 - Beta", + "Environment :: Web Environment", # I don't know what exactly this means, but why not? - 'Environment :: Web Environment :: Mozilla', - 'Framework :: Django', - 'Intended Audience :: Developers', - 'License :: OSI Approved :: BSD License', - 'Operating System :: OS Independent', - 'Programming Language :: Python', - 'Topic :: Software Development :: Libraries :: Python Modules', - ] + "Environment :: Web Environment :: Mozilla", + "Framework :: Django", + "Intended Audience :: Developers", + "License :: OSI Approved :: BSD License", + "Operating System :: OS Independent", + "Programming Language :: Python :: 2", + "Programming Language :: Python :: 2.7", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.4", + "Programming Language :: Python :: 3.5", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Topic :: Software Development :: Libraries :: Python Modules", + ], ) diff --git a/tests/test_cache.py b/tests/test_cache.py index a1017db..be212dd 100644 --- a/tests/test_cache.py +++ b/tests/test_cache.py @@ -1,194 +1,237 @@ -# -*- coding: utf-8 -*- -from django.conf import settings -from django.core.cache import cache -from django.utils import translation, encoding +from __future__ import unicode_literals + +import logging +import pickle +import sys +import unittest import jinja2 -import mock -from nose.tools import eq_ +from django.conf import settings +from django.core.cache.backends.base import DEFAULT_TIMEOUT +from django.test import TestCase, TransactionTestCase +from django.utils import encoding, translation + +from caching import base, config, invalidation -from test_utils import ExtraAppTestCase -import caching.base as caching -from caching import invalidation +from .testapp.models import Addon, User -from testapp.models import Addon, User +if sys.version_info >= (3,): + from unittest import mock +else: + import mock -class CachingTestCase(ExtraAppTestCase): - fixtures = ['testapp/test_cache.json'] - extra_apps = ['tests.testapp'] +cache = invalidation.cache +log = logging.getLogger(__name__) + + +class CachingTestCase(TestCase): + fixtures = ["tests/testapp/fixtures/testapp/test_cache.json"] + extra_apps = ["tests.testapp"] def setUp(self): cache.clear() - self.old_timeout = getattr(settings, 'CACHE_COUNT_TIMEOUT', None) - if getattr(settings, 'CACHE_MACHINE_USE_REDIS', False): + self.old_timeout = config.TIMEOUT + if getattr(settings, "CACHE_MACHINE_USE_REDIS", False): invalidation.redis.flushall() def tearDown(self): - settings.CACHE_COUNT_TIMEOUT = self.old_timeout + config.TIMEOUT = self.old_timeout def test_flush_key(self): """flush_key should work for objects or strings.""" a = Addon.objects.get(id=1) - eq_(caching.flush_key(a.cache_key), caching.flush_key(a)) + self.assertEqual( + base.flush_key(a.get_cache_key(incl_db=False)), base.flush_key(a) + ) def test_cache_key(self): a = Addon.objects.get(id=1) - eq_(a.cache_key, 'o:testapp.addon:1') + self.assertEqual(a.cache_key, "o:testapp.addon:1:default") keys = set((a.cache_key, a.author1.cache_key, a.author2.cache_key)) - eq_(set(a._cache_keys()), keys) + self.assertEqual(set(a._cache_keys()), keys) def test_cache(self): """Basic cache test: second get comes from cache.""" - assert Addon.objects.get(id=1).from_cache is False - assert Addon.objects.get(id=1).from_cache is True + self.assertIs(Addon.objects.get(id=1).from_cache, False) + 
self.assertIs(Addon.objects.get(id=1).from_cache, True) def test_filter_cache(self): - assert Addon.objects.filter(id=1)[0].from_cache is False - assert Addon.objects.filter(id=1)[0].from_cache is True + self.assertIs(Addon.objects.filter(id=1)[0].from_cache, False) + self.assertIs(Addon.objects.filter(id=1)[0].from_cache, True) def test_slice_cache(self): - assert Addon.objects.filter(id=1)[:1][0].from_cache is False - assert Addon.objects.filter(id=1)[:1][0].from_cache is True + self.assertIs(Addon.objects.filter(id=1)[:1][0].from_cache, False) + self.assertIs(Addon.objects.filter(id=1)[:1][0].from_cache, True) + + def test_should_not_cache_values(self): + with self.assertNumQueries(2): + Addon.objects.values("id")[0] + Addon.objects.values("id")[0] + + def test_should_not_cache_values_list(self): + with self.assertNumQueries(2): + Addon.objects.values_list("id")[0] + Addon.objects.values_list("id")[0] def test_invalidation(self): - assert Addon.objects.get(id=1).from_cache is False + self.assertIs(Addon.objects.get(id=1).from_cache, False) a = [x for x in Addon.objects.all() if x.id == 1][0] - assert a.from_cache is False + self.assertIs(a.from_cache, False) - assert Addon.objects.get(id=1).from_cache is True + self.assertIs(Addon.objects.get(id=1).from_cache, True) a = [x for x in Addon.objects.all() if x.id == 1][0] - assert a.from_cache is True + self.assertIs(a.from_cache, True) a.save() - assert Addon.objects.get(id=1).from_cache is False + self.assertIs(Addon.objects.get(id=1).from_cache, False) + a = [x for x in Addon.objects.all() if x.id == 1][0] + self.assertIs(a.from_cache, False) + + self.assertIs(Addon.objects.get(id=1).from_cache, True) a = [x for x in Addon.objects.all() if x.id == 1][0] - assert a.from_cache is False + self.assertIs(a.from_cache, True) def test_invalidation_cross_locale(self): - assert Addon.objects.get(id=1).from_cache is False + self.assertIs(Addon.objects.get(id=1).from_cache, False) a = [x for x in Addon.objects.all() if x.id == 1][0] - assert a.from_cache is False + self.assertIs(a.from_cache, False) - assert Addon.objects.get(id=1).from_cache is True + self.assertIs(Addon.objects.get(id=1).from_cache, True) a = [x for x in Addon.objects.all() if x.id == 1][0] - assert a.from_cache is True + self.assertIs(a.from_cache, True) # Do query & invalidation in a different locale. 
old_locale = translation.get_language() - translation.activate('fr') - assert Addon.objects.get(id=1).from_cache is True + translation.activate("fr") + self.assertIs(Addon.objects.get(id=1).from_cache, True) a = [x for x in Addon.objects.all() if x.id == 1][0] - assert a.from_cache is True + self.assertIs(a.from_cache, True) a.save() - assert Addon.objects.get(id=1).from_cache is False - a = [x for x in Addon.objects.all() if x.id == 1][0] - assert a.from_cache is False translation.activate(old_locale) - assert Addon.objects.get(id=1).from_cache is False + self.assertIs(Addon.objects.get(id=1).from_cache, False) a = [x for x in Addon.objects.all() if x.id == 1][0] - assert a.from_cache is False + self.assertIs(a.from_cache, False) def test_fk_invalidation(self): """When an object is invalidated, its foreign keys get invalidated.""" a = Addon.objects.get(id=1) - assert User.objects.get(name='clouseroo').from_cache is False + self.assertIs(User.objects.get(name="clouseroo").from_cache, False) a.save() - assert User.objects.get(name='clouseroo').from_cache is False + self.assertIs(User.objects.get(name="clouseroo").from_cache, False) def test_fk_parent_invalidation(self): """When a foreign key changes, any parent objects get invalidated.""" - assert Addon.objects.get(id=1).from_cache is False + self.assertIs(Addon.objects.get(id=1).from_cache, False) a = Addon.objects.get(id=1) - assert a.from_cache is True + self.assertIs(a.from_cache, True) u = User.objects.get(id=a.author1.id) - assert u.from_cache is True - u.name = 'fffuuu' + self.assertIs(u.from_cache, True) + u.name = "fffuuu" u.save() - assert User.objects.get(id=a.author1.id).from_cache is False + self.assertIs(User.objects.get(id=a.author1.id).from_cache, False) a = Addon.objects.get(id=1) - assert a.from_cache is False - eq_(a.author1.name, 'fffuuu') + self.assertIs(a.from_cache, False) + self.assertEqual(a.author1.name, "fffuuu") def test_raw_cache(self): - sql = 'SELECT * FROM %s WHERE id = 1' % Addon._meta.db_table + sql = "SELECT * FROM %s WHERE id = 1" % Addon._meta.db_table raw = list(Addon.objects.raw(sql)) - eq_(len(raw), 1) + self.assertEqual(len(raw), 1) raw_addon = raw[0] a = Addon.objects.get(id=1) for field in Addon._meta.fields: - eq_(getattr(a, field.name), getattr(raw_addon, field.name)) - assert raw_addon.from_cache is False + self.assertEqual(getattr(a, field.name), getattr(raw_addon, field.name)) + self.assertIs(raw_addon.from_cache, False) cached = list(Addon.objects.raw(sql)) - eq_(len(cached), 1) + self.assertEqual(len(cached), 1) cached_addon = cached[0] a = Addon.objects.get(id=1) for field in Addon._meta.fields: - eq_(getattr(a, field.name), getattr(cached_addon, field.name)) - assert cached_addon.from_cache is True + self.assertEqual(getattr(a, field.name), getattr(cached_addon, field.name)) + self.assertIs(cached_addon.from_cache, True) def test_raw_cache_params(self): """Make sure the query params are included in the cache key.""" - sql = 'SELECT * from %s WHERE id = %%s' % Addon._meta.db_table + sql = "SELECT * from %s WHERE id = %%s" % Addon._meta.db_table raw = list(Addon.objects.raw(sql, [1]))[0] - eq_(raw.id, 1) + self.assertEqual(raw.id, 1) raw2 = list(Addon.objects.raw(sql, [2]))[0] - eq_(raw2.id, 2) + self.assertEqual(raw2.id, 2) + + @mock.patch("caching.base.CachingModelIterable") + def test_raw_nocache(self, CachingModelIterable): + base.TIMEOUT = 60 + sql = "SELECT * FROM %s WHERE id = 1" % Addon._meta.db_table + raw = list(Addon.objects.raw(sql, timeout=config.NO_CACHE)) + 
self.assertEqual(len(raw), 1) + raw_addon = raw[0] + self.assertFalse(hasattr(raw_addon, "from_cache")) + self.assertFalse(CachingModelIterable.called) - @mock.patch('caching.base.cache') + @mock.patch("caching.base.cache") def test_count_cache(self, cache_mock): - settings.CACHE_COUNT_TIMEOUT = 60 - cache_mock.scheme = 'memcached' + config.TIMEOUT = 60 + cache_mock.scheme = "memcached" cache_mock.get.return_value = None q = Addon.objects.all() - count = q.count() + q.count() + self.assertTrue(cache_mock.set.call_args, "set not called") args, kwargs = cache_mock.set.call_args key, value, timeout = args - eq_(value, 2) - eq_(timeout, 60) + self.assertEqual(value, 2) + self.assertEqual(timeout, 60) - @mock.patch('caching.base.cached') + @mock.patch("caching.base.cached") def test_count_none_timeout(self, cached_mock): - settings.CACHE_COUNT_TIMEOUT = None + config.TIMEOUT = config.NO_CACHE Addon.objects.count() - eq_(cached_mock.call_count, 0) + self.assertEqual(cached_mock.call_count, 0) + + @mock.patch("caching.base.cached") + def test_count_nocache(self, cached_mock): + base.TIMEOUT = 60 + Addon.objects.no_cache().count() + self.assertEqual(cached_mock.call_count, 0) def test_queryset_flush_list(self): """Check that we're making a flush list for the queryset.""" q = Addon.objects.all() objects = list(q) # Evaluate the queryset so it gets cached. - caching.invalidator.add_to_flush_list({q.flush_key(): ['remove-me']}) - cache.set('remove-me', 15) + base.invalidator.add_to_flush_list({q.flush_key(): ["remove-me"]}) + cache.set("remove-me", 15) Addon.objects.invalidate(objects[0]) - assert cache.get(q.flush_key()) is None - assert cache.get('remove-me') is None + self.assertIs(cache.get(q.flush_key()), None) + self.assertIs(cache.get("remove-me"), None) def test_jinja_cache_tag_queryset(self): - env = jinja2.Environment(extensions=['caching.ext.cache']) + env = jinja2.Environment(extensions=["caching.ext.cache"]) + def check(q, expected): t = env.from_string( "{% cache q %}{% for x in q %}{{ x.id }}:{{ x.val }};" - "{% endfor %}{% endcache %}") - eq_(t.render(q=q), expected) + "{% endfor %}{% endcache %}" + ) + self.assertEqual(t.render(q=q), expected) # Get the template in cache, then hijack iterator to make sure we're # hitting the cached fragment. - check(Addon.objects.all(), '1:42;2:42;') + check(Addon.objects.all(), "1:42;2:42;") qs = Addon.objects.all() qs.iterator = mock.Mock() - check(qs, '1:42;2:42;') - assert not qs.iterator.called + check(qs, "1:42;2:42;") + self.assertFalse(qs.iterator.called) # Make changes, make sure we dropped the cached fragment. 
a = Addon.objects.get(id=1) @@ -196,228 +239,396 @@ def check(q, expected): a.save() q = Addon.objects.all() - flush = cache.get(q.flush_key()) - assert cache.get(q.flush_key()) is None + cache.get(q.flush_key()) + self.assertIs(cache.get(q.flush_key()), None) - check(Addon.objects.all(), '1:17;2:42;') + check(Addon.objects.all(), "1:17;2:42;") qs = Addon.objects.all() qs.iterator = mock.Mock() - check(qs, '1:17;2:42;') + check(qs, "1:17;2:42;") def test_jinja_cache_tag_object(self): - env = jinja2.Environment(extensions=['caching.ext.cache']) + env = jinja2.Environment(extensions=["caching.ext.cache"]) addon = Addon.objects.get(id=1) def check(obj, expected): t = env.from_string( - '{% cache obj, 30 %}{{ obj.id }}:{{ obj.val }}{% endcache %}') - eq_(t.render(obj=obj), expected) + "{% cache obj, 30 %}{{ obj.id }}:{{ obj.val }}{% endcache %}" + ) + self.assertEqual(t.render(obj=obj), expected) - check(addon, '1:42') + check(addon, "1:42") addon.val = 17 addon.save() - check(addon, '1:17') + check(addon, "1:17") def test_jinja_multiple_tags(self): - env = jinja2.Environment(extensions=['caching.ext.cache']) + env = jinja2.Environment(extensions=["caching.ext.cache"]) addon = Addon.objects.get(id=1) - template = ("{% cache obj %}{{ obj.id }}{% endcache %}\n" - "{% cache obj %}{{ obj.val }}{% endcache %}") + template = ( + "{% cache obj %}{{ obj.id }}{% endcache %}\n" + "{% cache obj %}{{ obj.val }}{% endcache %}" + ) def check(obj, expected): t = env.from_string(template) - eq_(t.render(obj=obj), expected) + self.assertEqual(t.render(obj=obj), expected) - check(addon, '1\n42') + check(addon, "1\n42") addon.val = 17 addon.save() - check(addon, '1\n17') + check(addon, "1\n17") def test_jinja_cache_tag_extra(self): - env = jinja2.Environment(extensions=['caching.ext.cache']) + env = jinja2.Environment(extensions=["caching.ext.cache"]) addon = Addon.objects.get(id=1) - template = ('{% cache obj, extra=[obj.key] %}{{ obj.id }}:' - '{{ obj.key }}{% endcache %}') + template = ( + "{% cache obj, extra=[obj.key] %}{{ obj.id }}:" + "{{ obj.key }}{% endcache %}" + ) def check(obj, expected): t = env.from_string(template) - eq_(t.render(obj=obj), expected) + self.assertEqual(t.render(obj=obj), expected) addon.key = 1 - check(addon, '1:1') + check(addon, "1:1") addon.key = 2 - check(addon, '1:2') + check(addon, "1:2") - template = ('{% cache obj, 10, extra=[obj.key] %}{{ obj.id }}:' - '{{ obj.key }}{% endcache %}') + template = ( + "{% cache obj, 10, extra=[obj.key] %}{{ obj.id }}:" + "{{ obj.key }}{% endcache %}" + ) addon.key = 1 - check(addon, '1:1') + check(addon, "1:1") addon.key = 2 - check(addon, '1:2') + check(addon, "1:2") def test_cached_with(self): counter = mock.Mock() + def expensive(): counter() return counter.call_count a = Addon.objects.get(id=1) - f = lambda: caching.cached_with(a, expensive, 'key') + + def f(): + return base.cached_with(a, expensive, "key") # Only gets called once. - eq_(f(), 1) - eq_(f(), 1) + self.assertEqual(f(), 1) + self.assertEqual(f(), 1) # Switching locales does not reuse the cache. old_locale = translation.get_language() - translation.activate('fr') - eq_(f(), 2) + translation.activate("fr") + self.assertEqual(f(), 2) # Called again after flush. a.save() - eq_(f(), 3) + self.assertEqual(f(), 3) translation.activate(old_locale) - eq_(f(), 4) + self.assertEqual(f(), 4) counter.reset_mock() q = Addon.objects.filter(id=1) - f = lambda: caching.cached_with(q, expensive, 'key') + + def f(): + return base.cached_with(q, expensive, "key") # Only gets called once. 
- eq_(f(), 1) - eq_(f(), 1) + self.assertEqual(f(), 1) + self.assertEqual(f(), 1) # Called again after flush. list(q)[0].save() - eq_(f(), 2) - eq_(f(), 2) + self.assertEqual(f(), 2) + self.assertEqual(f(), 2) def test_cached_with_bad_object(self): """cached_with shouldn't fail if the object is missing a cache key.""" counter = mock.Mock() + def f(): counter() return counter.call_count - eq_(caching.cached_with([], f, 'key'), 1) + self.assertEqual(base.cached_with([], f, "key"), 1) def test_cached_with_unicode(self): - u = ':'.join(map(encoding.smart_str, [u'תיאור אוסף'])) + u = encoding.smart_bytes( + "\\u05ea\\u05d9\\u05d0\\u05d5\\u05e8 " "\\u05d0\\u05d5\\u05e1\\u05e3" + ) obj = mock.Mock() - obj.query_key.return_value = u'xxx' - obj.flush_key.return_value = 'key' - f = lambda: 1 - eq_(caching.cached_with(obj, f, 'adf:%s' % u), 1) + obj.query_key.return_value = "xxx" + obj.flush_key.return_value = "key" + + def f(): + return 1 + + self.assertEqual(base.cached_with(obj, f, "adf:%s" % u), 1) def test_cached_method(self): a = Addon.objects.get(id=1) - eq_(a.calls(), (1, 1)) - eq_(a.calls(), (1, 1)) + self.assertEqual(a.calls(), (1, 1)) + self.assertEqual(a.calls(), (1, 1)) a.save() # Still returns 1 since the object has it's own local cache. - eq_(a.calls(), (1, 1)) - eq_(a.calls(3), (3, 2)) + self.assertEqual(a.calls(), (1, 1)) + self.assertEqual(a.calls(3), (3, 2)) a = Addon.objects.get(id=1) - eq_(a.calls(), (1, 3)) - eq_(a.calls(4), (4, 4)) - eq_(a.calls(3), (3, 2)) + self.assertEqual(a.calls(), (1, 3)) + self.assertEqual(a.calls(4), (4, 4)) + self.assertEqual(a.calls(3), (3, 2)) b = Addon.objects.create(id=5, val=32, author1_id=1, author2_id=2) - eq_(b.calls(), (1, 5)) + self.assertEqual(b.calls(), (1, 5)) # Make sure we're updating the wrapper's docstring. 
- eq_(b.calls.__doc__, Addon.calls.__doc__) + self.assertEqual(b.calls.__doc__, Addon.calls.__doc__) - @mock.patch('caching.base.CacheMachine') - def test_no_cache_from_manager(self, CacheMachine): + @mock.patch("caching.base.cache.get") + def test_no_cache_from_manager(self, mock_cache): a = Addon.objects.no_cache().get(id=1) - eq_(a.id, 1) - assert not hasattr(a, 'from_cache') - assert not CacheMachine.called + self.assertEqual(a.id, 1) + self.assertFalse(hasattr(a, "from_cache")) + self.assertFalse(mock_cache.called) - @mock.patch('caching.base.CacheMachine') - def test_no_cache_from_queryset(self, CacheMachine): + @mock.patch("caching.base.cache.get") + def test_no_cache_from_queryset(self, mock_cache): a = Addon.objects.all().no_cache().get(id=1) - eq_(a.id, 1) - assert not hasattr(a, 'from_cache') - assert not CacheMachine.called + self.assertEqual(a.id, 1) + self.assertFalse(hasattr(a, "from_cache")) + self.assertFalse(mock_cache.called) def test_timeout_from_manager(self): q = Addon.objects.cache(12).filter(id=1) - eq_(q.timeout, 12) + self.assertEqual(q.timeout, 12) a = q.get() - assert hasattr(a, 'from_cache') - eq_(a.id, 1) + self.assertTrue(hasattr(a, "from_cache")) + self.assertEqual(a.id, 1) def test_timeout_from_queryset(self): q = Addon.objects.all().cache(12).filter(id=1) - eq_(q.timeout, 12) + self.assertEqual(q.timeout, 12) a = q.get() - assert hasattr(a, 'from_cache') - eq_(a.id, 1) + self.assertTrue(hasattr(a, "from_cache")) + self.assertEqual(a.id, 1) + + @unittest.skipUnless( + any(["memcache" in c["BACKEND"] for c in settings.CACHES.values()]), + "This test requires that Django use memcache", + ) + @mock.patch("memcache.Client.set") + def test_infinite_timeout(self, mock_set): + """ + Test that memcached infinite timeouts work with all Django versions. + """ + cache.set("foo", "bar", timeout=None) + # for memcached, 0 timeout means store forever + mock_set.assert_called_with(":1:foo", "bar", 0) def test_cache_and_no_cache(self): """Whatever happens last sticks.""" q = Addon.objects.no_cache().cache(12).filter(id=1) - eq_(q.timeout, 12) + self.assertEqual(q.timeout, 12) no_cache = q.no_cache() # The querysets don't share anything. - eq_(q.timeout, 12) - assert no_cache.timeout != 12 + self.assertEqual(q.timeout, 12) + self.assertNotEqual(no_cache.timeout, 12) - assert not hasattr(no_cache.get(), 'from_cache') + self.assertFalse(hasattr(no_cache.get(), "from_cache")) - eq_(q.get().id, 1) - assert hasattr(q.get(), 'from_cache') + self.assertEqual(q.get().id, 1) + self.assertTrue(hasattr(q.get(), "from_cache")) - @mock.patch('caching.base.cache') + @mock.patch("caching.base.cache") def test_cache_machine_timeout(self, cache): - cache.scheme = 'memcached' + cache.scheme = "memcached" cache.get.return_value = None cache.get_many.return_value = {} a = Addon.objects.cache(12).get(id=1) - eq_(a.id, 1) + self.assertEqual(a.id, 1) - assert cache.add.called + self.assertTrue(cache.add.called) args, kwargs = cache.add.call_args - eq_(kwargs, {'timeout': 12}) + self.assertEqual(kwargs, {"timeout": 12}) def test_unicode_key(self): - list(User.objects.filter(name=u'ümlaüt')) + list(User.objects.filter(name="\\xfcmla\\xfct")) def test_empty_in(self): # Raised an exception before fixing #2. - eq_([], list(User.objects.filter(pk__in=[]))) + self.assertEqual([], list(User.objects.filter(pk__in=[]))) + + def test_empty_in_count(self): + # Regression test for #14. 
+ self.assertEqual(0, User.objects.filter(pk__in=[]).count()) + + def test_empty_queryset(self): + for k in (1, 1): + with self.assertNumQueries(k): + self.assertEqual(len(Addon.objects.filter(pk=42)), 0) + + @mock.patch("caching.config.CACHE_EMPTY_QUERYSETS", True) + def test_cache_empty_queryset(self): + for k in (1, 0): + with self.assertNumQueries(k): + self.assertEqual(len(Addon.objects.filter(pk=42)), 0) def test_invalidate_empty_queryset(self): u = User.objects.create() - eq_(list(u.addon_set.all()), []) + self.assertEqual(list(u.addon_set.all()), []) Addon.objects.create(val=42, author1=u, author2=u) - eq_([a.val for a in u.addon_set.all()], [42]) + self.assertEqual([a.val for a in u.addon_set.all()], [42]) - def test_invalidate_new_object(self): + def test_invalidate_new_related_object(self): u = User.objects.create() Addon.objects.create(val=42, author1=u, author2=u) - eq_([a.val for a in u.addon_set.all()], [42]) + self.assertEqual([a.val for a in u.addon_set.all()], [42]) Addon.objects.create(val=17, author1=u, author2=u) - eq_([a.val for a in u.addon_set.all()], [42, 17]) + self.assertEqual([a.val for a in u.addon_set.all()], [42, 17]) def test_make_key_unicode(self): - translation.activate(u'en-US') - f = 'fragment\xe9\x9b\xbb\xe8\x85\xa6\xe7\x8e' + translation.activate("en-US") + f = "fragment\xe9\x9b\xbb\xe8\x85\xa6\xe7\x8e" # This would crash with a unicode error. - caching.make_key(f, with_locale=True) + base.make_key(f, with_locale=True) translation.deactivate() - @mock.patch('caching.invalidation.cache.get_many') + @mock.patch("caching.invalidation.cache.get_many") def test_get_flush_lists_none(self, cache_mock): - if not getattr(settings, 'CACHE_MACHINE_USE_REDIS', False): + if not getattr(settings, "CACHE_MACHINE_USE_REDIS", False): cache_mock.return_value.values.return_value = [None, [1]] - eq_(caching.invalidator.get_flush_lists(None), set([1])) + self.assertEqual(base.invalidator.get_flush_lists(None), set([1])) + + def test_parse_backend_uri(self): + """Test that parse_backend_uri works as intended. Regression for #92.""" + from caching.invalidation import parse_backend_uri + + uri = "redis://127.0.0.1:6379?socket_timeout=5" + host, params = parse_backend_uri(uri) + self.assertEqual(host, "127.0.0.1:6379") + self.assertEqual(params, {"socket_timeout": "5"}) + + @mock.patch("caching.config.CACHE_INVALIDATE_ON_CREATE", "whole-model") + def test_invalidate_on_create_enabled(self): + """Test that creating new objects invalidates cached queries for that model.""" + self.assertEqual([a.name for a in User.objects.all()], ["fliggy", "clouseroo"]) + User.objects.create(name="spam") + users = User.objects.all() + # our new user should show up and the query should not have come from the cache + self.assertEqual([a.name for a in users], ["fliggy", "clouseroo", "spam"]) + self.assertFalse(any([u.from_cache for u in users])) + # if we run it again, it should be cached this time + users = User.objects.all() + self.assertEqual([a.name for a in users], ["fliggy", "clouseroo", "spam"]) + self.assertTrue(all([u.from_cache for u in User.objects.all()])) + + @mock.patch("caching.config.CACHE_INVALIDATE_ON_CREATE", None) + def test_invalidate_on_create_disabled(self): + """ + Test that creating new objects does NOT invalidate cached queries when + whole-model invalidation on create is disabled. 
+ """ + users = User.objects.all() + self.assertTrue(users, "Can't run this test without some users") + self.assertFalse(any([u.from_cache for u in users])) + User.objects.create(name="spam") + self.assertTrue(all([u.from_cache for u in User.objects.all()])) + + def test_pickle_queryset(self): + """ + Test for CacheingQuerySet.__getstate__ and CachingQuerySet.__setstate__. + """ + # Make sure CachingQuerySet.timeout, when set to DEFAULT_TIMEOUT, can be safely + # pickled/unpickled on/from different Python processes which may have different + # underlying values for DEFAULT_TIMEOUT: + q1 = Addon.objects.all() + self.assertEqual(q1.timeout, DEFAULT_TIMEOUT) + pickled = pickle.dumps(q1) + new_timeout = object() + with mock.patch("caching.base.DEFAULT_TIMEOUT", new_timeout): + q2 = pickle.loads(pickled) + self.assertEqual(q2.timeout, new_timeout) + # Make sure values other than DEFAULT_TIMEOUT remain unaffected: + q1 = Addon.objects.cache(10).all() + self.assertEqual(q1.timeout, 10) + pickled = pickle.dumps(q1) + with mock.patch("caching.base.DEFAULT_TIMEOUT", new_timeout): + q2 = pickle.loads(pickled) + self.assertEqual(q2.timeout, 10) + + +# use TransactionTestCase so that ['TEST']['MIRROR'] setting works +# see https://code.djangoproject.com/ticket/23718 +class MultiDbTestCase(TransactionTestCase): + databases = {"default", "primary2", "replica", "replica2"} + fixtures = ["tests/testapp/fixtures/testapp/test_cache.json"] + extra_apps = ["tests.testapp"] + + def test_multidb_cache(self): + """Test where primary and replica DB result in two different cache keys""" + self.assertIs(Addon.objects.get(id=1).from_cache, False) + self.assertIs(Addon.objects.get(id=1).from_cache, True) + + from_replica = Addon.objects.using("replica").get(id=1) + self.assertIs(from_replica.from_cache, False) + self.assertEqual(from_replica._state.db, "replica") + + def test_multidb_fetch_by_id(self): + """ + Test where primary and replica DB result in two different cache keys + with FETCH_BY_ID + """ + with self.settings(FETCH_BY_ID=True): + self.assertIs(Addon.objects.get(id=1).from_cache, False) + self.assertIs(Addon.objects.get(id=1).from_cache, True) + + from_replica = Addon.objects.using("replica").get(id=1) + self.assertIs(from_replica.from_cache, False) + self.assertEqual(from_replica._state.db, "replica") + + def test_multidb_primary_replica_invalidation(self): + """Test saving an object on one DB invalidates it for all DBs""" + log.debug("priming the DB & cache") + primary_obj = User.objects.using("default").create(name="new-test-user") + replica_obj = User.objects.using("replica").get(name="new-test-user") + self.assertIs(replica_obj.from_cache, False) + log.debug("deleting the original object") + User.objects.using("default").filter(pk=replica_obj.pk).delete() + log.debug("re-creating record with a new primary key") + primary_obj = User.objects.using("default").create(name="new-test-user") + log.debug("attempting to force re-fetch from DB (should not use cache)") + replica_obj = User.objects.using("replica").get(name="new-test-user") + self.assertIs(replica_obj.from_cache, False) + self.assertEqual(replica_obj.pk, primary_obj.pk) + + def test_multidb_no_db_crossover(self): + """Test no crossover of objects with identical PKs""" + primary_obj = User.objects.using("default").create(name="new-test-user") + primary_obj2 = User.objects.using("primary2").create( + pk=primary_obj.pk, + name="other-test-user", + ) + # prime the cache for the default DB + primary_obj = 
User.objects.using("default").get(name="new-test-user") + self.assertIs(primary_obj.from_cache, False) + primary_obj = User.objects.using("default").get(name="new-test-user") + self.assertIs(primary_obj.from_cache, True) + # prime the cache for the 2nd primary DB + primary_obj2 = User.objects.using("primary2").get(name="other-test-user") + self.assertIs(primary_obj2.from_cache, False) + primary_obj2 = User.objects.using("primary2").get(name="other-test-user") + self.assertIs(primary_obj2.from_cache, True) + # ensure no crossover between databases + self.assertNotEqual(primary_obj.name, primary_obj2.name) diff --git a/tests/testapp/fixtures/testapp/test_cache.json b/tests/testapp/fixtures/testapp/test_cache.json index 40221b8..e9ed2df 100644 --- a/tests/testapp/fixtures/testapp/test_cache.json +++ b/tests/testapp/fixtures/testapp/test_cache.json @@ -1,34 +1,34 @@ [ - { - "pk": 1, - "model": "testapp.user", - "fields": { - "name": "fliggy" - } - }, - { - "pk": 2, - "model": "testapp.user", - "fields": { - "name": "clouseroo" - } - }, - { - "pk": 1, - "model": "testapp.addon", - "fields": { - "author2": 1, - "author1": 2, - "val": 42 - } - }, - { - "pk": 2, - "model": "testapp.addon", - "fields": { - "author2": 1, - "author1": 2, - "val": 42 - } + { + "pk": 1, + "model": "testapp.user", + "fields": { + "name": "fliggy" } + }, + { + "pk": 2, + "model": "testapp.user", + "fields": { + "name": "clouseroo" + } + }, + { + "pk": 1, + "model": "testapp.addon", + "fields": { + "author2": 1, + "author1": 2, + "val": 42 + } + }, + { + "pk": 2, + "model": "testapp.addon", + "fields": { + "author2": 1, + "author1": 2, + "val": 42 + } + } ] diff --git a/tests/testapp/models.py b/tests/testapp/models.py index 2e88db6..429d44e 100644 --- a/tests/testapp/models.py +++ b/tests/testapp/models.py @@ -1,9 +1,9 @@ -from django.db import models - -import mock +from unittest import mock -from caching.base import CachingMixin, CachingManager, cached_method +import django +from django.db import models +from caching.base import CachingManager, CachingMixin, cached_method # This global call counter will be shared among all instances of an Addon. call_counter = mock.Mock() @@ -14,14 +14,27 @@ class User(CachingMixin, models.Model): objects = CachingManager() + if django.VERSION[0] >= 2: + + class Meta: + # Tell Django to use this manager when resolving foreign keys. + # (Django >= 2.0) + base_manager_name = "objects" + class Addon(CachingMixin, models.Model): val = models.IntegerField() - author1 = models.ForeignKey(User) - author2 = models.ForeignKey(User, related_name='author2_set') + author1 = models.ForeignKey(User, on_delete=models.CASCADE) + author2 = models.ForeignKey( + User, related_name="author2_set", on_delete=models.CASCADE + ) objects = CachingManager() + class Meta: + # without this, Postgres & SQLite return objects in different orders: + ordering = ("pk",) + @cached_method def calls(self, arg=1): """This is a docstring for calls()""" diff --git a/tox.ini b/tox.ini new file mode 100644 index 0000000..1303662 --- /dev/null +++ b/tox.ini @@ -0,0 +1,43 @@ +# Tox (http://tox.testrun.org/) is a tool for running tests +# in multiple virtualenvs. This configuration file will run the +# test suite on all supported python versions. To use it, "pip install tox" +# and then run "tox" from this directory. 
+ +[tox] +envlist = py3{6,7,8,9}-{2.2,3.0,3.1,3.2},py310-3.2,py3{8,9,10}-{4.0} + +[gh-actions] +python = + 3.6: py36 + 3.7: py37 + 3.8: py38 + 3.9: py39 + 3.10: py310 + +[testenv] +commands = {envpython} run_tests.py --with-coverage +passenv = + DATABASE_URL + DATABASE_URL_2 +deps = + -rdev-requirements.txt + 2.2: Django>=2.2,<3.0 + 3.0: Django>=3.0,<3.1 + 3.1: Django>=3.1,<3.2 + 3.2: Django>=3.2,<4.0 + 4.0: Django>=4.0,<4.1 + +[testenv:docs] +basepython = python3.7 +deps = + Sphinx + Django +setenv = + PYTHONPATH = {toxinidir}/examples/ + DJANGO_SETTINGS_MODULE = cache_machine.settings +changedir = docs +commands = /usr/bin/make html + +[testenv:py37-flake8] +deps = flake8 +commands = flake8
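Note on usage (not part of the diff): the pattern the rewritten tests exercise can be sketched with a minimal model. This is an illustrative sketch only; it assumes django-cache-machine is installed and a cache backend is configured in Django settings, and the "Author" model name is hypothetical rather than taken from the patch.

# sketch_models.py -- illustrative only; mirrors the pattern in tests/testapp/models.py above.
# Assumes django-cache-machine is installed and CACHES/DATABASES are configured in settings.
from django.db import models

from caching.base import CachingManager, CachingMixin


class Author(CachingMixin, models.Model):  # "Author" is a hypothetical example model
    name = models.CharField(max_length=100)

    objects = CachingManager()

    class Meta:
        # Tell Django to use this manager when resolving foreign keys (Django >= 2.0),
        # matching the base_manager_name setting added in tests/testapp/models.py.
        base_manager_name = "objects"


# Query-level controls asserted by the test suite above:
#   Author.objects.get(pk=1)             # first call hits the DB and caches the row
#   Author.objects.get(pk=1).from_cache  # True on the second, cached fetch
#   Author.objects.cache(12).all()       # cache this queryset for 12 seconds
#   Author.objects.no_cache().all()      # bypass the cache for this queryset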