From 42b6a393d2aa406c22b6617584daeb620d41daf9 Mon Sep 17 00:00:00 2001 From: Jeff Balogh Date: Fri, 4 Jun 2010 13:24:35 -0700 Subject: [PATCH 001/214] trying fetch-by-id for better cache hit ratio --- caching/base.py | 43 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 43 insertions(+) diff --git a/caching/base.py b/caching/base.py index 44f5fed..06f2b00 100644 --- a/caching/base.py +++ b/caching/base.py @@ -24,6 +24,7 @@ def emit(self, record): FOREVER = 0 NO_CACHE = -1 CACHE_PREFIX = getattr(settings, 'CACHE_PREFIX', '') +FETCH_BY_ID = getattr(settings, 'FETCH_BY_ID', False) FLUSH = CACHE_PREFIX + ':flush:' scheme, _, _ = parse_backend_uri(settings.CACHE_BACKEND) @@ -158,6 +159,8 @@ def cache_objects(self, objects): for key in map(flush_key, obj._cache_keys()): if key != obj_flush: flush_lists[key].append(obj_flush) + if FETCH_BY_ID: + flush_lists[key].append(byid(obj)) add_to_flush_list(flush_lists) @@ -184,8 +187,43 @@ def iterator(self): query_string = self.query_key() except query.EmptyResultSet: return iterator() + if FETCH_BY_ID: + iterator = self.fetch_by_id return iter(CacheMachine(query_string, iterator, self.timeout)) + def fetch_by_id(self): + """ + Run two queries to get objects: one for the ids, one for id__in=ids. + + After getting ids from the first query we can try cache.get_many to + reuse objects we've already seen. Then we fetch the remaining items + from the db, and put those in the cache. This prevents cache + duplication. + """ + # Include columns from extra since they could be used in the query's + # order_by. 
+ vals = self.values_list('pk', *self.query.extra.keys()) + pks = [val[0] for val in vals] + keys = dict((byid(self.model._cache_key(pk)), pk) for pk in pks) + cached = dict((k, v) for k, v in cache.get_many(keys).items() + if v is not None) + + missed = [pk for key, pk in keys.items() if key not in cached] + others = self.model.objects.filter(pk__in=missed) + if hasattr(others, 'no_cache'): + others = others.no_cache() + if self.query.select_related: + others.dup_select_related(self) + + # Put the fetched objects back in cache. + new = dict((byid(o), o) for o in others) + cache.set_many(new) + + # Use pks to return the objects in the correct order. + objects = dict((o.pk, o) for o in cached.values() + new.values()) + for pk in pks: + yield objects[pk] + def count(self): timeout = getattr(settings, 'CACHE_COUNT_TIMEOUT', None) super_count = super(CachingQuerySet, self).count @@ -256,6 +294,11 @@ def flush_key(obj): return FLUSH + make_key(key, with_locale=False) +def byid(obj): + key = obj if isinstance(obj, basestring) else obj._cache_key(obj.pk) + return make_key('byid:' + key) + + def add_to_flush_list(mapping): """Update flush lists with the {flush_key: [query_key,...]} map.""" flush_lists = collections.defaultdict(set) From cd862c9b49d2665427e82c99c25b8db3c50aadc1 Mon Sep 17 00:00:00 2001 From: Jeff Balogh Date: Tue, 8 Jun 2010 17:55:10 -0700 Subject: [PATCH 002/214] clear out the default ordering since we order based on the query --- caching/base.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/caching/base.py b/caching/base.py index 06f2b00..e3eb9b0 100644 --- a/caching/base.py +++ b/caching/base.py @@ -209,7 +209,8 @@ def fetch_by_id(self): if v is not None) missed = [pk for key, pk in keys.items() if key not in cached] - others = self.model.objects.filter(pk__in=missed) + # Clear out the default ordering since we order based on the query. 
+ others = self.model.objects.filter(pk__in=missed).order_by() if hasattr(others, 'no_cache'): others = others.no_cache() if self.query.select_related: From 4209189c365a6862463912de9346094af24291aa Mon Sep 17 00:00:00 2001 From: Jeff Balogh Date: Fri, 25 Jun 2010 14:43:27 -0700 Subject: [PATCH 003/214] extension point for adding anything extra to the cache_support. --- caching/ext.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/caching/ext.py b/caching/ext.py index 6d9d20e..ac6eaea 100644 --- a/caching/ext.py +++ b/caching/ext.py @@ -47,11 +47,17 @@ def parse(self, parser): body = parser.parse_statements(['name:endcache'], drop_needle=True) + self.process_cache_arguments(args) + # now return a `CallBlock` node that calls our _cache_support # helper method on this extension. return nodes.CallBlock(self.call_method('_cache_support', args), [], [], body).set_lineno(lineno) + def process_cache_arguments(self, args): + """Extension point for adding anything extra to the cache_support.""" + pass + def _cache_support(self, name, obj, timeout, caller): """Cache helper callback.""" if settings.TEMPLATE_DEBUG: From 5ac91c7583999ac729f91a7fe5ff74f2dbd425e1 Mon Sep 17 00:00:00 2001 From: Jeff Balogh Date: Tue, 17 Aug 2010 13:07:40 -0700 Subject: [PATCH 004/214] fixing another unicode issue (bug 588123) --- caching/base.py | 2 +- tests/test_cache.py | 10 +++++++++- 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/caching/base.py b/caching/base.py index 44f5fed..664e3b4 100644 --- a/caching/base.py +++ b/caching/base.py @@ -307,7 +307,7 @@ def cached_with(obj, f, f_key, timeout=None): log.warning(u'%r cannot be cached.' % obj) return f() - key = '%s:%s' % (f_key, obj_key) + key = '%s:%s' % tuple(map(encoding.smart_str, (f_key, obj_key))) # Put the key generated in cached() into this object's flush list. 
add_to_flush_list({obj.flush_key(): [_function_cache_key(key)]}) return cached(f, key, timeout) diff --git a/tests/test_cache.py b/tests/test_cache.py index 5da7ab6..f66cd0c 100644 --- a/tests/test_cache.py +++ b/tests/test_cache.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- from django.conf import settings from django.core.cache import cache -from django.utils import translation +from django.utils import translation, encoding import jinja2 import mock @@ -293,6 +293,14 @@ def f(): eq_(caching.cached_with([], f, 'key'), 1) + def test_cached_with_unicode(self): + u = ':'.join(map(encoding.smart_str, [u'תיאור אוסף'])) + obj = mock.Mock() + obj.query_key.return_value = u'xxx' + obj.flush_key.return_value = 'key' + f = lambda: 1 + eq_(caching.cached_with(obj, f, 'adf:%s' % u), 1) + def test_cached_method(self): a = Addon.objects.get(id=1) eq_(a.calls(), (1, 1)) From d03fa491d8e651abb8f8eb71f4a24fd3b2b2252c Mon Sep 17 00:00:00 2001 From: Jeff Balogh Date: Wed, 18 Aug 2010 16:47:03 -0700 Subject: [PATCH 005/214] =?UTF-8?q?=E2=99=A5=20unicode?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- caching/base.py | 6 +++--- tests/test_cache.py | 7 +++++++ 2 files changed, 10 insertions(+), 3 deletions(-) diff --git a/caching/base.py b/caching/base.py index 664e3b4..8d169f9 100644 --- a/caching/base.py +++ b/caching/base.py @@ -270,13 +270,13 @@ def add_to_flush_list(mapping): def make_key(k, with_locale=True): """Generate the full key for ``k``, with a prefix.""" - key = '%s:%s' % (CACHE_PREFIX, k) + key = encoding.smart_str('%s:%s' % (CACHE_PREFIX, k)) if with_locale: - key += translation.get_language() + key += encoding.smart_str(translation.get_language()) # memcached keys must be < 250 bytes and w/o whitespace, but it's nice # to see the keys when using locmem. 
if 'memcached' in cache.scheme: - return hashlib.md5(encoding.smart_str(key)).hexdigest() + return hashlib.md5(key).hexdigest() else: return key diff --git a/tests/test_cache.py b/tests/test_cache.py index f66cd0c..9c0c50c 100644 --- a/tests/test_cache.py +++ b/tests/test_cache.py @@ -398,3 +398,10 @@ def test_invalidate_new_object(self): eq_([a.val for a in u.addon_set.all()], [42]) Addon.objects.create(val=17, author1=u, author2=u) eq_([a.val for a in u.addon_set.all()], [42, 17]) + + def test_make_key_unicode(self): + translation.activate(u'en-US') + f = 'fragment\xe9\x9b\xbb\xe8\x85\xa6\xe7\x8e' + eq_(caching.make_key(f, with_locale=True), + 'b83d174032efa27bf1c9ce1db19fa6ec') + translation.deactivate() From 0ca435683f81cd0a07f4b91f8f450e3355e5e315 Mon Sep 17 00:00:00 2001 From: Jeff Balogh Date: Fri, 29 Oct 2010 13:57:05 -0700 Subject: [PATCH 006/214] getting test coverage for FETCH_BY_ID --- caching/invalidation.py | 2 +- examples/cache-machine/memcache_byid.py | 3 +++ examples/cache-machine/redis_byid.py | 3 +++ fabfile.py | 5 ++++- 4 files changed, 11 insertions(+), 2 deletions(-) create mode 100644 examples/cache-machine/memcache_byid.py create mode 100644 examples/cache-machine/redis_byid.py diff --git a/caching/invalidation.py b/caching/invalidation.py index 8fe5f1f..89d93a4 100644 --- a/caching/invalidation.py +++ b/caching/invalidation.py @@ -98,7 +98,7 @@ def cache_objects(self, objects, query_key, query_flush): if key != obj_flush: flush_lists[key].add(obj_flush) if FETCH_BY_ID: - flush_lists[key].append(byid(obj)) + flush_lists[key].add(byid(obj)) self.add_to_flush_list(flush_lists) def find_flush_lists(self, keys): diff --git a/examples/cache-machine/memcache_byid.py b/examples/cache-machine/memcache_byid.py new file mode 100644 index 0000000..85d711d --- /dev/null +++ b/examples/cache-machine/memcache_byid.py @@ -0,0 +1,3 @@ +from settings import * + +FETCH_BY_ID = True diff --git a/examples/cache-machine/redis_byid.py 
b/examples/cache-machine/redis_byid.py new file mode 100644 index 0000000..0504351 --- /dev/null +++ b/examples/cache-machine/redis_byid.py @@ -0,0 +1,3 @@ +from redis_settings import * + +FETCH_BY_ID = True diff --git a/fabfile.py b/fabfile.py index 113f921..e953208 100644 --- a/fabfile.py +++ b/fabfile.py @@ -26,8 +26,11 @@ def doc(kind='html'): local('make clean %s' % kind) +SETTINGS = ('locmem_settings', 'settings', 'memcache_byid', + 'redis_settings', 'redis_byid') + def test(): - for settings in ('locmem_settings', 'settings', 'redis_settings'): + for settings in SETTINGS: print settings os.environ['DJANGO_SETTINGS_MODULE'] = 'cache-machine.%s' % settings local('django-admin.py test') From 7424e8baaab012fd06dbf15bfcfa3c177f32b5d6 Mon Sep 17 00:00:00 2001 From: Jeff Balogh Date: Wed, 3 Nov 2010 11:34:11 -0700 Subject: [PATCH 007/214] reuse the existing query --- caching/base.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/caching/base.py b/caching/base.py index b351b06..25f30d5 100644 --- a/caching/base.py +++ b/caching/base.py @@ -166,11 +166,9 @@ def fetch_by_id(self): missed = [pk for key, pk in keys.items() if key not in cached] # Clear out the default ordering since we order based on the query. - others = self.model.objects.filter(pk__in=missed).order_by() + others = self.filter(pk__in=missed).order_by() if hasattr(others, 'no_cache'): others = others.no_cache() - if self.query.select_related: - others.dup_select_related(self) # Put the fetched objects back in cache. new = dict((byid(o), o) for o in others) From dd641b76277754c53b4cf73256ee72c8e298d67c Mon Sep 17 00:00:00 2001 From: Jeff Balogh Date: Wed, 10 Nov 2010 10:20:15 -0800 Subject: [PATCH 008/214] Revert "reuse the existing query" This reverts commit 7424e8baaab012fd06dbf15bfcfa3c177f32b5d6. 
--- caching/base.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/caching/base.py b/caching/base.py index 25f30d5..b351b06 100644 --- a/caching/base.py +++ b/caching/base.py @@ -166,9 +166,11 @@ def fetch_by_id(self): missed = [pk for key, pk in keys.items() if key not in cached] # Clear out the default ordering since we order based on the query. - others = self.filter(pk__in=missed).order_by() + others = self.model.objects.filter(pk__in=missed).order_by() if hasattr(others, 'no_cache'): others = others.no_cache() + if self.query.select_related: + others.dup_select_related(self) # Put the fetched objects back in cache. new = dict((byid(o), o) for o in others) From 87c7db8791bf64be3488b060305a9ca6093313a3 Mon Sep 17 00:00:00 2001 From: Jeff Balogh Date: Wed, 10 Nov 2010 10:22:15 -0800 Subject: [PATCH 009/214] use the parent's db --- caching/base.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/caching/base.py b/caching/base.py index b351b06..9db1837 100644 --- a/caching/base.py +++ b/caching/base.py @@ -166,7 +166,8 @@ def fetch_by_id(self): missed = [pk for key, pk in keys.items() if key not in cached] # Clear out the default ordering since we order based on the query. 
- others = self.model.objects.filter(pk__in=missed).order_by() + others = (self.model.objects.filter(pk__in=missed).order_by() + .using(self.db)) if hasattr(others, 'no_cache'): others = others.no_cache() if self.query.select_related: From 39b1b04331e9f89d790d59b35e7861ee461c3517 Mon Sep 17 00:00:00 2001 From: Jeff Balogh Date: Thu, 11 Nov 2010 10:39:19 -0800 Subject: [PATCH 010/214] adding logging to figure out bug 604685 --- caching/invalidation.py | 1 + 1 file changed, 1 insertion(+) diff --git a/caching/invalidation.py b/caching/invalidation.py index 89d93a4..1ab2924 100644 --- a/caching/invalidation.py +++ b/caching/invalidation.py @@ -58,6 +58,7 @@ def wrapper(*args, **kw): return f(*args, **kw) except (socket.error, redislib.RedisError), e: log.error('redis error: %s' % e) + log.error('%r\n%r : %r' % (f.__name__, args[1:], kw)) if hasattr(return_type, '__call__'): return return_type() else: From 6673914e3828d87af21c8af7cec3c9fea60e2b6f Mon Sep 17 00:00:00 2001 From: Jeff Balogh Date: Fri, 19 Nov 2010 11:07:27 -0800 Subject: [PATCH 011/214] django 1.3 complains about memcached-unsafe keys --- caching/invalidation.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/caching/invalidation.py b/caching/invalidation.py index 1ab2924..14c6db3 100644 --- a/caching/invalidation.py +++ b/caching/invalidation.py @@ -28,10 +28,7 @@ def make_key(k, with_locale=True): key += encoding.smart_str(translation.get_language()) # memcached keys must be < 250 bytes and w/o whitespace, but it's nice # to see the keys when using locmem. 
- if 'memcached' in cache.scheme: - return hashlib.md5(key).hexdigest() - else: - return key + return hashlib.md5(key).hexdigest() def flush_key(obj): From 171bfe6daae510745ba2b6f8431aa726084ca273 Mon Sep 17 00:00:00 2001 From: Jeff Balogh Date: Tue, 30 Nov 2010 14:54:10 -0800 Subject: [PATCH 012/214] add tests for queries with slices --- tests/test_cache.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/tests/test_cache.py b/tests/test_cache.py index a7404c3..a1017db 100644 --- a/tests/test_cache.py +++ b/tests/test_cache.py @@ -44,6 +44,14 @@ def test_cache(self): assert Addon.objects.get(id=1).from_cache is False assert Addon.objects.get(id=1).from_cache is True + def test_filter_cache(self): + assert Addon.objects.filter(id=1)[0].from_cache is False + assert Addon.objects.filter(id=1)[0].from_cache is True + + def test_slice_cache(self): + assert Addon.objects.filter(id=1)[:1][0].from_cache is False + assert Addon.objects.filter(id=1)[:1][0].from_cache is True + def test_invalidation(self): assert Addon.objects.get(id=1).from_cache is False a = [x for x in Addon.objects.all() if x.id == 1][0] From d2175e0ef4f8f449055025e3efe7b8932dde7af9 Mon Sep 17 00:00:00 2001 From: Jeff Balogh Date: Tue, 30 Nov 2010 14:57:50 -0800 Subject: [PATCH 013/214] reuse the existing query for fetch-byid --- caching/base.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/caching/base.py b/caching/base.py index 9db1837..31a7504 100644 --- a/caching/base.py +++ b/caching/base.py @@ -164,10 +164,13 @@ def fetch_by_id(self): cached = dict((k, v) for k, v in cache.get_many(keys).items() if v is not None) + # Pick up the objects we missed. missed = [pk for key, pk in keys.items() if key not in cached] + # Reuse self but clear limits in case there was a slice. + others = self.all() + others.query.clear_limits() # Clear out the default ordering since we order based on the query. 
- others = (self.model.objects.filter(pk__in=missed).order_by() - .using(self.db)) + others = others.order_by().filter(pk__in=missed) if hasattr(others, 'no_cache'): others = others.no_cache() if self.query.select_related: From a7081a635388f6d03de1698f2ce075c145e49cb9 Mon Sep 17 00:00:00 2001 From: Jeff Balogh Date: Wed, 16 Feb 2011 15:58:04 -0800 Subject: [PATCH 014/214] don't do any more queries if missing is empty --- caching/base.py | 29 ++++++++++++++++------------- 1 file changed, 16 insertions(+), 13 deletions(-) diff --git a/caching/base.py b/caching/base.py index 31a7504..d167b33 100644 --- a/caching/base.py +++ b/caching/base.py @@ -166,19 +166,22 @@ def fetch_by_id(self): # Pick up the objects we missed. missed = [pk for key, pk in keys.items() if key not in cached] - # Reuse self but clear limits in case there was a slice. - others = self.all() - others.query.clear_limits() - # Clear out the default ordering since we order based on the query. - others = others.order_by().filter(pk__in=missed) - if hasattr(others, 'no_cache'): - others = others.no_cache() - if self.query.select_related: - others.dup_select_related(self) - - # Put the fetched objects back in cache. - new = dict((byid(o), o) for o in others) - cache.set_many(new) + if missed: + # Reuse the queryset but get a clean query. + others = self.all() + others.query.clear_limits() + # Clear out the default ordering since we order based on the query. + others = others.order_by().filter(pk__in=missed) + if hasattr(others, 'no_cache'): + others = others.no_cache() + if self.query.select_related: + others.dup_select_related(self) + + # Put the fetched objects back in cache. + new = dict((byid(o), o) for o in others) + cache.set_many(new) + else: + new = {} # Use pks to return the objects in the correct order. 
objects = dict((o.pk, o) for o in cached.values() + new.values()) From 5a543dbac108cae7be6f26d36de8004b4ec4fc2c Mon Sep 17 00:00:00 2001 From: Jeff Balogh Date: Wed, 16 Feb 2011 15:58:28 -0800 Subject: [PATCH 015/214] get a clean Query for the id query --- caching/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/caching/base.py b/caching/base.py index d167b33..a0ae34d 100644 --- a/caching/base.py +++ b/caching/base.py @@ -169,7 +169,7 @@ def fetch_by_id(self): if missed: # Reuse the queryset but get a clean query. others = self.all() - others.query.clear_limits() + others.query = query.Query(others.model) # Clear out the default ordering since we order based on the query. others = others.order_by().filter(pk__in=missed) if hasattr(others, 'no_cache'): From 995b62c68e0be452acf9a64b91293beafa5ac470 Mon Sep 17 00:00:00 2001 From: Jeff Balogh Date: Thu, 17 Feb 2011 16:21:29 -0800 Subject: [PATCH 016/214] Revert "get a clean Query for the id query" This reverts commit 5a543dbac108cae7be6f26d36de8004b4ec4fc2c. --- caching/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/caching/base.py b/caching/base.py index a0ae34d..d167b33 100644 --- a/caching/base.py +++ b/caching/base.py @@ -169,7 +169,7 @@ def fetch_by_id(self): if missed: # Reuse the queryset but get a clean query. others = self.all() - others.query = query.Query(others.model) + others.query.clear_limits() # Clear out the default ordering since we order based on the query. 
others = others.order_by().filter(pk__in=missed) if hasattr(others, 'no_cache'): From ec5ad09a94b2ec6205d1b1e00b96803bf0a69fcc Mon Sep 17 00:00:00 2001 From: Jeff Balogh Date: Fri, 18 Feb 2011 16:37:55 -0800 Subject: [PATCH 017/214] move the missed query to an overridable method --- caching/base.py | 23 +++++++++++++---------- 1 file changed, 13 insertions(+), 10 deletions(-) diff --git a/caching/base.py b/caching/base.py index d167b33..e76fb7c 100644 --- a/caching/base.py +++ b/caching/base.py @@ -167,16 +167,7 @@ def fetch_by_id(self): # Pick up the objects we missed. missed = [pk for key, pk in keys.items() if key not in cached] if missed: - # Reuse the queryset but get a clean query. - others = self.all() - others.query.clear_limits() - # Clear out the default ordering since we order based on the query. - others = others.order_by().filter(pk__in=missed) - if hasattr(others, 'no_cache'): - others = others.no_cache() - if self.query.select_related: - others.dup_select_related(self) - + others = self.fetch_missed(missed) # Put the fetched objects back in cache. new = dict((byid(o), o) for o in others) cache.set_many(new) @@ -188,6 +179,18 @@ def fetch_by_id(self): for pk in pks: yield objects[pk] + def fetch_missed(self, pks): + # Reuse the queryset but get a clean query. + others = self.all() + others.query.clear_limits() + # Clear out the default ordering since we order based on the query. 
+ others = others.order_by().filter(pk__in=pks) + if hasattr(others, 'no_cache'): + others = others.no_cache() + if self.query.select_related: + others.dup_select_related(self) + return others + def count(self): timeout = getattr(settings, 'CACHE_COUNT_TIMEOUT', None) super_count = super(CachingQuerySet, self).count From 7e1152e27a19ef1a3b3f60c0e66d2c84d6bfeaa3 Mon Sep 17 00:00:00 2001 From: Jeff Balogh Date: Fri, 25 Feb 2011 16:31:07 -0800 Subject: [PATCH 018/214] updating for django 1.3 --- caching/backends/locmem.py | 4 ++-- caching/backends/memcached.py | 8 ++++---- caching/base.py | 3 --- examples/cache-machine/locmem_settings.py | 6 +++++- examples/cache-machine/settings.py | 7 ++++++- 5 files changed, 17 insertions(+), 11 deletions(-) diff --git a/caching/backends/locmem.py b/caching/backends/locmem.py index 48cbfd2..ca955f3 100644 --- a/caching/backends/locmem.py +++ b/caching/backends/locmem.py @@ -4,12 +4,12 @@ # Add infinite timeout support to the locmem backend. Useful for testing. class CacheClass(locmem.CacheClass): - def add(self, key, value, timeout=None): + def add(self, key, value, timeout=None, version=None): if timeout == 0: timeout = Infinity return super(CacheClass, self).add(key, value, timeout) - def set(self, key, value, timeout=None): + def set(self, key, value, timeout=None, version=None): if timeout == 0: timeout = Infinity return super(CacheClass, self).set(key, value, timeout) diff --git a/caching/backends/memcached.py b/caching/backends/memcached.py index 0ef77a4..f0b6670 100644 --- a/caching/backends/memcached.py +++ b/caching/backends/memcached.py @@ -5,12 +5,12 @@ # Add infinite timeout support to the memcached backend. 
class CacheClass(memcached.CacheClass): - def add(self, key, value, timeout=None): + def add(self, key, value, timeout=None, version=None): if timeout is None: timeout = self.default_timeout - return self._cache.add(smart_str(key), value, timeout) + return super(CacheClass, self).add(key, value, timeout, version) - def set(self, key, value, timeout=None): + def set(self, key, value, timeout=None, version=None): if timeout is None: timeout = self.default_timeout - return self._cache.set(smart_str(key), value, timeout) + return super(CacheClass, self).set(key, value, timeout, version) diff --git a/caching/base.py b/caching/base.py index e76fb7c..2784f6b 100644 --- a/caching/base.py +++ b/caching/base.py @@ -26,9 +26,6 @@ def emit(self, record): CACHE_PREFIX = getattr(settings, 'CACHE_PREFIX', '') FETCH_BY_ID = getattr(settings, 'FETCH_BY_ID', False) -scheme, _, _ = parse_backend_uri(settings.CACHE_BACKEND) -cache.scheme = scheme - class CachingManager(models.Manager): diff --git a/examples/cache-machine/locmem_settings.py b/examples/cache-machine/locmem_settings.py index de85736..5b3cbaf 100644 --- a/examples/cache-machine/locmem_settings.py +++ b/examples/cache-machine/locmem_settings.py @@ -1,3 +1,7 @@ from settings import * -CACHE_BACKEND = 'caching.backends.locmem://' +CACHES = { + 'default': { + 'BACKEND': 'caching.backends.locmem.CacheClass', + }, +} diff --git a/examples/cache-machine/settings.py b/examples/cache-machine/settings.py index 56192b8..811c633 100644 --- a/examples/cache-machine/settings.py +++ b/examples/cache-machine/settings.py @@ -1,4 +1,9 @@ -CACHE_BACKEND = 'caching.backends.memcached://localhost:11211' +CACHES = { + 'default': { + 'BACKEND': 'caching.backends.memcached.CacheClass', + 'LOCATION': 'localhost:11211', + }, +} TEST_RUNNER = 'django_nose.runner.NoseTestSuiteRunner' From 7ffe24a9821249ccd9e0c88288a1ac16bb71341c Mon Sep 17 00:00:00 2001 From: Jeff Balogh Date: Wed, 27 Apr 2011 18:02:27 -0700 Subject: [PATCH 019/214] this line can be 
troublesome (bug 652019) --- caching/invalidation.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/caching/invalidation.py b/caching/invalidation.py index 14c6db3..bd6e7e4 100644 --- a/caching/invalidation.py +++ b/caching/invalidation.py @@ -55,7 +55,7 @@ def wrapper(*args, **kw): return f(*args, **kw) except (socket.error, redislib.RedisError), e: log.error('redis error: %s' % e) - log.error('%r\n%r : %r' % (f.__name__, args[1:], kw)) + # log.error('%r\n%r : %r' % (f.__name__, args[1:], kw)) if hasattr(return_type, '__call__'): return return_type() else: From 416804a5052d101b0f4026b3a26aa94753086e63 Mon Sep 17 00:00:00 2001 From: Jeff Balogh Date: Wed, 11 May 2011 10:37:36 -0500 Subject: [PATCH 020/214] let caching logging config be overridden (thanks qingfeng) --- caching/base.py | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/caching/base.py b/caching/base.py index 2784f6b..634a28b 100644 --- a/caching/base.py +++ b/caching/base.py @@ -7,18 +7,12 @@ from django.db.models import signals from django.db.models.sql import query from django.utils import encoding +from django.utils.log import NullHandler from .invalidation import invalidator, flush_key, make_key, byid -class NullHandler(logging.Handler): - - def emit(self, record): - pass - - log = logging.getLogger('caching') -log.setLevel(logging.INFO) log.addHandler(NullHandler()) FOREVER = 0 From 53c25417b789ce76728dcac4627bfb70ab9057cb Mon Sep 17 00:00:00 2001 From: Jeff Balogh Date: Mon, 23 May 2011 10:10:01 -0700 Subject: [PATCH 021/214] keep defining our own NullHandler to maintain django 1.2 compat --- caching/base.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/caching/base.py b/caching/base.py index 634a28b..bfd749a 100644 --- a/caching/base.py +++ b/caching/base.py @@ -7,11 +7,16 @@ from django.db.models import signals from django.db.models.sql import query from django.utils import encoding -from django.utils.log import NullHandler 
from .invalidation import invalidator, flush_key, make_key, byid +class NullHandler(logging.Handler): + + def emit(self, record): + pass + + log = logging.getLogger('caching') log.addHandler(NullHandler()) From 18eb801d44785626ca8c3de993282847971a3980 Mon Sep 17 00:00:00 2001 From: Jeff Balogh Date: Tue, 5 Jul 2011 10:14:41 -0700 Subject: [PATCH 022/214] release a new version compatible with Django 1.3 --- README.rst | 3 +-- caching/__init__.py | 2 +- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/README.rst b/README.rst index 48c8463..c1670f2 100644 --- a/README.rst +++ b/README.rst @@ -11,8 +11,7 @@ For full docs, see http://jbalogh.me/projects/cache-machine. Requirements ------------ -Cache Machine requires Django 1.2 (currently on trunk). It was written and -tested on Python 2.6. +Cache Machine requires Django 1.3. It was written and tested on Python 2.6. Installation diff --git a/caching/__init__.py b/caching/__init__.py index b06d400..444f6b9 100644 --- a/caching/__init__.py +++ b/caching/__init__.py @@ -1,2 +1,2 @@ -VERSION = (0, 4) +VERSION = (0, 6) __version__ = '.'.join(map(str, VERSION)) From 09653b936188df797f0aa785e69c00584067170a Mon Sep 17 00:00:00 2001 From: Jeff Balogh Date: Tue, 19 Jul 2011 16:51:53 -0700 Subject: [PATCH 023/214] beware the unicode monster --- caching/base.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/caching/base.py b/caching/base.py index bfd749a..987a7cf 100644 --- a/caching/base.py +++ b/caching/base.py @@ -2,7 +2,7 @@ import logging from django.conf import settings -from django.core.cache import cache, parse_backend_uri +from django.core.cache import cache from django.db import models from django.db.models import signals from django.db.models.sql import query @@ -274,7 +274,7 @@ def cached_with(obj, f, f_key, timeout=None): obj_key = (obj.query_key() if hasattr(obj, 'query_key') else obj.cache_key) except AttributeError: - log.warning(u'%r cannot be cached.' 
% obj) + log.warning(u'%r cannot be cached.' % encoding.smart_str(obj)) return f() key = '%s:%s' % tuple(map(encoding.smart_str, (f_key, obj_key))) From ee6e23c3185a48efec5579e0e9d7521b54c1e676 Mon Sep 17 00:00:00 2001 From: Jeff Balogh Date: Mon, 3 Oct 2011 13:45:26 -0700 Subject: [PATCH 024/214] update the requirements.txt file --- requirements.txt | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/requirements.txt b/requirements.txt index 28477bb..8d7dd37 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,9 +1,10 @@ # These are the reqs to build docs and run tests. sphinx mock --e git://github.com/jbalogh/django-nose.git@6f060d49ee193a05734704820f3fea92ee1759d2#egg=django-nose --e svn+http://code.djangoproject.com/svn/django/trunk@12335#egg=Django +django-nose +-e svn+http://code.djangoproject.com/svn/django/trunk@16922#egg=Django python-memcached -e git://github.com/jbalogh/test-utils.git#egg=test-utils fabric jinja2 +redis From 57bc3be4f10a83ac0f1623462fbfd0d76a2c1afe Mon Sep 17 00:00:00 2001 From: Michael Kelly Date: Wed, 11 Jan 2012 12:01:08 -0800 Subject: [PATCH 025/214] Fix CachingQuerySet to respect no_cache. 
--- caching/base.py | 2 +- tests/test_cache.py | 6 ++++++ 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/caching/base.py b/caching/base.py index 987a7cf..d82bad2 100644 --- a/caching/base.py +++ b/caching/base.py @@ -191,7 +191,7 @@ def count(self): timeout = getattr(settings, 'CACHE_COUNT_TIMEOUT', None) super_count = super(CachingQuerySet, self).count query_string = 'count:%s' % self.query_key() - if timeout is None: + if self.timeout == NO_CACHE or timeout is None: return super_count() else: return cached_with(self, super_count, query_string, timeout) diff --git a/tests/test_cache.py b/tests/test_cache.py index a1017db..ea76475 100644 --- a/tests/test_cache.py +++ b/tests/test_cache.py @@ -163,6 +163,12 @@ def test_count_none_timeout(self, cached_mock): Addon.objects.count() eq_(cached_mock.call_count, 0) + @mock.patch('caching.base.cached') + def test_count_nocache(self, cached_mock): + settings.CACHE_COUNT_TIMEOUT = 60 + Addon.objects.no_cache().count() + eq_(cached_mock.call_count, 0) + def test_queryset_flush_list(self): """Check that we're making a flush list for the queryset.""" q = Addon.objects.all() From 7a3b48f7d0d26482568f9cdac6d7725e2bb69a12 Mon Sep 17 00:00:00 2001 From: James Socol Date: Thu, 16 Feb 2012 17:36:36 -0500 Subject: [PATCH 026/214] Update docs for Django 1.3. 
--- docs/index.rst | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/docs/index.rst b/docs/index.rst index 97616e2..3e734e8 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -36,6 +36,31 @@ If you want to set a prefix for all keys in Cache Machine, define CACHE_PREFIX = 'weee:' + +Django 1.3 +^^^^^^^^^^ + +With Django 1.3 or higher, you should use the new ``CACHES`` setting:: + + CACHES = { + 'default': { + 'BACKEND': 'caching.backends.memcached.CacheClass', + 'LOCATION': [ + 'server-1:11211', + 'server-2:11211', + ], + 'PREFIX': 'weee:', + }, + } + +Note that we have to specify the class, not the module, for the ``BACKEND`` +property, and that the ``PREFIX`` is optional. The ``LOCATION`` may be a +string, instead of a list, if you only have one server. + + +COUNT queries +^^^^^^^^^^^^^ + Calls to ``QuerySet.count()`` can be cached, but they cannot be reliably invalidated. Cache Machine would have to do a full select to figure out the object keys, which is probably much more data than you want to pull. 
I From a592a030f7b45ec5bcb34619de6f66862f35e530 Mon Sep 17 00:00:00 2001 From: Jeff Balogh Date: Mon, 26 Mar 2012 10:42:27 -0700 Subject: [PATCH 027/214] skip redis tests if redis is not available --- examples/cache-machine/settings.py | 2 ++ fabfile.py | 12 ++++++++++-- requirements.txt | 2 +- 3 files changed, 13 insertions(+), 3 deletions(-) diff --git a/examples/cache-machine/settings.py b/examples/cache-machine/settings.py index 811c633..123c69c 100644 --- a/examples/cache-machine/settings.py +++ b/examples/cache-machine/settings.py @@ -17,3 +17,5 @@ INSTALLED_APPS = ( 'django_nose', ) + +SECRET_KEY = 'ok' diff --git a/fabfile.py b/fabfile.py index e953208..824c7a7 100644 --- a/fabfile.py +++ b/fabfile.py @@ -26,8 +26,16 @@ def doc(kind='html'): local('make clean %s' % kind) -SETTINGS = ('locmem_settings', 'settings', 'memcache_byid', - 'redis_settings', 'redis_byid') +SETTINGS = ('locmem_settings', + 'settings', + 'memcache_byid') + +try: + import redis + redis.Redis(host='localhost', port=6379).info() + SETTINGS += ('redis_settings', 'redis_byid') +except Exception: + print 'WARNING: Skipping redis tests.' 
def test(): for settings in SETTINGS: diff --git a/requirements.txt b/requirements.txt index 8d7dd37..60f66b1 100644 --- a/requirements.txt +++ b/requirements.txt @@ -2,7 +2,7 @@ sphinx mock django-nose --e svn+http://code.djangoproject.com/svn/django/trunk@16922#egg=Django +django==1.4 python-memcached -e git://github.com/jbalogh/test-utils.git#egg=test-utils fabric From 7292a5c4a3864aba109e7c4532829472cc1205c1 Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Tue, 24 Jan 2012 20:59:46 -0500 Subject: [PATCH 028/214] add CACHE_EMPTY_QUERYSETS setting --- caching/base.py | 3 ++- docs/index.rst | 5 +++++ 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/caching/base.py b/caching/base.py index d82bad2..40cb507 100644 --- a/caching/base.py +++ b/caching/base.py @@ -24,6 +24,7 @@ def emit(self, record): NO_CACHE = -1 CACHE_PREFIX = getattr(settings, 'CACHE_PREFIX', '') FETCH_BY_ID = getattr(settings, 'FETCH_BY_ID', False) +CACHE_EMPTY_QUERYSETS = getattr(settings, 'CACHE_EMPTY_QUERYSETS', False) class CachingManager(models.Manager): @@ -104,7 +105,7 @@ def __iter__(self): to_cache.append(obj) yield obj except StopIteration: - if to_cache: + if to_cache or CACHE_EMPTY_QUERYSETS: self.cache_objects(to_cache) raise diff --git a/docs/index.rst b/docs/index.rst index 3e734e8..8ae7cab 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -69,6 +69,11 @@ short enough that stale counts won't be a big deal. :: CACHE_COUNT_TIMEOUT = 60 # seconds, not too long. +Due to potential issues with invalidation, caching of empty querysets is turned +off by default. 
To enable caching of empty querysets, add the following to +your settings file: + + CACHE_EMPTY_QUERYSETS = True Cache Manager ------------- From 06791539ecacc27bab7755e0dbb9a9342ebe7547 Mon Sep 17 00:00:00 2001 From: Andy McKay Date: Mon, 16 Apr 2012 17:54:10 -0700 Subject: [PATCH 029/214] cache empty querysets --- caching/base.py | 3 ++- docs/index.rst | 7 +++++++ tests/test_cache.py | 11 +++++++++++ 3 files changed, 20 insertions(+), 1 deletion(-) diff --git a/caching/base.py b/caching/base.py index d82bad2..40cb507 100644 --- a/caching/base.py +++ b/caching/base.py @@ -24,6 +24,7 @@ def emit(self, record): NO_CACHE = -1 CACHE_PREFIX = getattr(settings, 'CACHE_PREFIX', '') FETCH_BY_ID = getattr(settings, 'FETCH_BY_ID', False) +CACHE_EMPTY_QUERYSETS = getattr(settings, 'CACHE_EMPTY_QUERYSETS', False) class CachingManager(models.Manager): @@ -104,7 +105,7 @@ def __iter__(self): to_cache.append(obj) yield obj except StopIteration: - if to_cache: + if to_cache or CACHE_EMPTY_QUERYSETS: self.cache_objects(to_cache) raise diff --git a/docs/index.rst b/docs/index.rst index 3e734e8..7fbe705 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -69,6 +69,13 @@ short enough that stale counts won't be a big deal. :: CACHE_COUNT_TIMEOUT = 60 # seconds, not too long. +Empty querysets +^^^^^^^^^^^^^^^ + +By default cache machine will not cache empty querysets. To cache them:: + + CACHE_EMPTY_QUERYSETS = True + Cache Manager ------------- diff --git a/tests/test_cache.py b/tests/test_cache.py index ea76475..b2ddebe 100644 --- a/tests/test_cache.py +++ b/tests/test_cache.py @@ -402,6 +402,17 @@ def test_empty_in(self): # Raised an exception before fixing #2. 
eq_([], list(User.objects.filter(pk__in=[]))) + def test_empty_queryset(self): + for k in (1, 1): + with self.assertNumQueries(k): + eq_(len(Addon.objects.filter(pk=42)), 0) + + @mock.patch('caching.base.CACHE_EMPTY_QUERYSETS', True) + def test_cache_empty_queryset(self): + for k in (1, 0): + with self.assertNumQueries(k): + eq_(len(Addon.objects.filter(pk=42)), 0) + def test_invalidate_empty_queryset(self): u = User.objects.create() eq_(list(u.addon_set.all()), []) From b901096f0e5af474918de52ec483d6b864506e06 Mon Sep 17 00:00:00 2001 From: Jeff Balogh Date: Tue, 17 Apr 2012 10:51:35 -0700 Subject: [PATCH 030/214] beware the unicode monster (fixes #21) --- caching/base.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/caching/base.py b/caching/base.py index 40cb507..81d868d 100644 --- a/caching/base.py +++ b/caching/base.py @@ -327,8 +327,9 @@ def __call__(self, *args, **kwargs): k = lambda o: o.cache_key if hasattr(o, 'cache_key') else o arg_keys = map(k, args) kwarg_keys = [(key, k(val)) for key, val in kwargs.items()] - key = 'm:%s:%s:%s:%s' % (self.obj.cache_key, self.func.__name__, - arg_keys, kwarg_keys) + key_parts = ('m', self.obj.cache_key, self.func.__name__, + arg_keys, kwarg_keys) + key = ':'.join(map(encoding.smart_unicode, key_parts)) if key not in self.cache: f = functools.partial(self.func, self.obj, *args, **kwargs) self.cache[key] = cached_with(self.obj, f, key) From 94a38ce6ff44a7922c0ea7b6f1803b8f7996ac86 Mon Sep 17 00:00:00 2001 From: Ben Plesser Date: Mon, 30 Apr 2012 18:25:25 -0300 Subject: [PATCH 031/214] Fix for django weirdness with inheritance for proxy model counts --- caching/base.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/caching/base.py b/caching/base.py index 81d868d..98816ed 100644 --- a/caching/base.py +++ b/caching/base.py @@ -125,9 +125,9 @@ def __init__(self, *args, **kw): def flush_key(self): return flush_key(self.query_key()) - def query_key(self): - sql, params = 
self.query.get_compiler(using=self.db).as_sql() + clone = self.query.clone() + sql, params = clone.get_compiler(using=self.db).as_sql() return sql % params def iterator(self): From d071e92512f1dcc464cff5cd175672f133796c55 Mon Sep 17 00:00:00 2001 From: Ben Plesser Date: Mon, 30 Apr 2012 18:43:14 -0300 Subject: [PATCH 032/214] Bumped version # for pip install purposes --- caching/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/caching/__init__.py b/caching/__init__.py index 444f6b9..4098cd8 100644 --- a/caching/__init__.py +++ b/caching/__init__.py @@ -1,2 +1,2 @@ -VERSION = (0, 6) +VERSION = (0, 7) __version__ = '.'.join(map(str, VERSION)) From d2a9993af55bf6dd0aa84817687ce8da7a6dc425 Mon Sep 17 00:00:00 2001 From: Ben Plesser Date: Mon, 30 Apr 2012 18:47:11 -0300 Subject: [PATCH 033/214] Bumped version again --- caching/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/caching/__init__.py b/caching/__init__.py index 4098cd8..ccefa86 100644 --- a/caching/__init__.py +++ b/caching/__init__.py @@ -1,2 +1,2 @@ -VERSION = (0, 7) +VERSION = (0, 7.1) __version__ = '.'.join(map(str, VERSION)) From b0f49d5ac8419cc7db1d38e74e64a400e5977625 Mon Sep 17 00:00:00 2001 From: Ben Plesser Date: Fri, 29 Jun 2012 11:56:42 -0300 Subject: [PATCH 034/214] Making Cache Machine play nicely with multiple databases (i.e. master and slave) --- caching/base.py | 17 +++++++++++++---- 1 file changed, 13 insertions(+), 4 deletions(-) diff --git a/caching/base.py b/caching/base.py index 98816ed..53bcbd7 100644 --- a/caching/base.py +++ b/caching/base.py @@ -70,14 +70,23 @@ class CacheMachine(object): called to get an iterator over some database results. 
""" - def __init__(self, query_string, iter_function, timeout=None): + def __init__(self, query_string, iter_function, timeout=None, db='default'): self.query_string = query_string self.iter_function = iter_function self.timeout = timeout + self.db = db def query_key(self): - """Generate the cache key for this query.""" - return make_key('qs:%s' % self.query_string, with_locale=False) + """ + Generate the cache key for this query. + Database router info is included to avoid the scenario where + related cached objects from one DB (e.g. slave) + are saved in another DB (e.g. master), + throwing a Django ValueError in the process. + Django prevents cross DB model saving among related objects. + """ + query_db_string = 'qs:{}::db:{}'.format(self.query_string, self.db) + return make_key(query_db_string, with_locale=False) def __iter__(self): try: @@ -142,7 +151,7 @@ def iterator(self): return iterator() if FETCH_BY_ID: iterator = self.fetch_by_id - return iter(CacheMachine(query_string, iterator, self.timeout)) + return iter(CacheMachine(query_string, iterator, self.timeout, db=self.db)) def fetch_by_id(self): """ From c6a80e8667bce864dc53e6089d74104d98adbc9e Mon Sep 17 00:00:00 2001 From: Jeff Balogh Date: Fri, 29 Jun 2012 09:59:41 -0700 Subject: [PATCH 035/214] make the format string unicode to avoid the unicode monster --- caching/base.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/caching/base.py b/caching/base.py index 53bcbd7..8b3bd86 100644 --- a/caching/base.py +++ b/caching/base.py @@ -79,13 +79,13 @@ def __init__(self, query_string, iter_function, timeout=None, db='default'): def query_key(self): """ Generate the cache key for this query. - Database router info is included to avoid the scenario where - related cached objects from one DB (e.g. slave) - are saved in another DB (e.g. master), - throwing a Django ValueError in the process. - Django prevents cross DB model saving among related objects. 
+ + Database router info is included to avoid the scenario where related + cached objects from one DB (e.g. slave) are saved in another DB (e.g. + master), throwing a Django ValueError in the process. Django prevents + cross DB model saving among related objects. """ - query_db_string = 'qs:{}::db:{}'.format(self.query_string, self.db) + query_db_string = u'qs:{}::db:{}'.format(self.query_string, self.db) return make_key(query_db_string, with_locale=False) def __iter__(self): From 3b998e3b876018bc6f490e79abf6416004cde146 Mon Sep 17 00:00:00 2001 From: Jeff Balogh Date: Thu, 5 Jul 2012 09:55:09 -0700 Subject: [PATCH 036/214] use old-style format strings --- caching/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/caching/base.py b/caching/base.py index 8b3bd86..cf60a13 100644 --- a/caching/base.py +++ b/caching/base.py @@ -85,7 +85,7 @@ def query_key(self): master), throwing a Django ValueError in the process. Django prevents cross DB model saving among related objects. """ - query_db_string = u'qs:{}::db:{}'.format(self.query_string, self.db) + query_db_string = u'qs:%s::db:%s' % (self.query_string, self.db) return make_key(query_db_string, with_locale=False) def __iter__(self): From 869700c2078bade4e07a1db7b202096b10af6f15 Mon Sep 17 00:00:00 2001 From: Jannis Leidel Date: Fri, 28 Sep 2012 16:48:26 +0200 Subject: [PATCH 037/214] Renamed test settings package to have a correct name. 
--- examples/{cache-machine => cache_machine}/__init__.py | 0 examples/{cache-machine => cache_machine}/locmem_settings.py | 0 examples/{cache-machine => cache_machine}/memcache_byid.py | 0 examples/{cache-machine => cache_machine}/redis_byid.py | 0 examples/{cache-machine => cache_machine}/redis_settings.py | 0 examples/{cache-machine => cache_machine}/settings.py | 2 +- fabfile.py | 2 +- 7 files changed, 2 insertions(+), 2 deletions(-) rename examples/{cache-machine => cache_machine}/__init__.py (100%) rename examples/{cache-machine => cache_machine}/locmem_settings.py (100%) rename examples/{cache-machine => cache_machine}/memcache_byid.py (100%) rename examples/{cache-machine => cache_machine}/redis_byid.py (100%) rename examples/{cache-machine => cache_machine}/redis_settings.py (100%) rename examples/{cache-machine => cache_machine}/settings.py (92%) diff --git a/examples/cache-machine/__init__.py b/examples/cache_machine/__init__.py similarity index 100% rename from examples/cache-machine/__init__.py rename to examples/cache_machine/__init__.py diff --git a/examples/cache-machine/locmem_settings.py b/examples/cache_machine/locmem_settings.py similarity index 100% rename from examples/cache-machine/locmem_settings.py rename to examples/cache_machine/locmem_settings.py diff --git a/examples/cache-machine/memcache_byid.py b/examples/cache_machine/memcache_byid.py similarity index 100% rename from examples/cache-machine/memcache_byid.py rename to examples/cache_machine/memcache_byid.py diff --git a/examples/cache-machine/redis_byid.py b/examples/cache_machine/redis_byid.py similarity index 100% rename from examples/cache-machine/redis_byid.py rename to examples/cache_machine/redis_byid.py diff --git a/examples/cache-machine/redis_settings.py b/examples/cache_machine/redis_settings.py similarity index 100% rename from examples/cache-machine/redis_settings.py rename to examples/cache_machine/redis_settings.py diff --git a/examples/cache-machine/settings.py 
b/examples/cache_machine/settings.py similarity index 92% rename from examples/cache-machine/settings.py rename to examples/cache_machine/settings.py index 123c69c..4aa69c4 100644 --- a/examples/cache-machine/settings.py +++ b/examples/cache_machine/settings.py @@ -9,7 +9,7 @@ DATABASES = { 'default': { - 'NAME': 'test.db', + 'NAME': ':memory:', 'ENGINE': 'django.db.backends.sqlite3', } } diff --git a/fabfile.py b/fabfile.py index 824c7a7..fb6a9d0 100644 --- a/fabfile.py +++ b/fabfile.py @@ -40,7 +40,7 @@ def doc(kind='html'): def test(): for settings in SETTINGS: print settings - os.environ['DJANGO_SETTINGS_MODULE'] = 'cache-machine.%s' % settings + os.environ['DJANGO_SETTINGS_MODULE'] = 'cache_machine.%s' % settings local('django-admin.py test') From 28302151dea79ffc2bb7acb2a5ffc7175ad2754c Mon Sep 17 00:00:00 2001 From: Jannis Leidel Date: Fri, 28 Sep 2012 16:49:48 +0200 Subject: [PATCH 038/214] Moved timeout setting assignment to module to lower number of getattr calls. --- caching/base.py | 6 +++--- tests/test_cache.py | 10 +++++----- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/caching/base.py b/caching/base.py index cf60a13..ccec246 100644 --- a/caching/base.py +++ b/caching/base.py @@ -25,6 +25,7 @@ def emit(self, record): CACHE_PREFIX = getattr(settings, 'CACHE_PREFIX', '') FETCH_BY_ID = getattr(settings, 'FETCH_BY_ID', False) CACHE_EMPTY_QUERYSETS = getattr(settings, 'CACHE_EMPTY_QUERYSETS', False) +TIMEOUT = getattr(settings, 'CACHE_COUNT_TIMEOUT', None) class CachingManager(models.Manager): @@ -198,13 +199,12 @@ def fetch_missed(self, pks): return others def count(self): - timeout = getattr(settings, 'CACHE_COUNT_TIMEOUT', None) super_count = super(CachingQuerySet, self).count query_string = 'count:%s' % self.query_key() - if self.timeout == NO_CACHE or timeout is None: + if self.timeout == NO_CACHE or TIMEOUT is None: return super_count() else: - return cached_with(self, super_count, query_string, timeout) + return cached_with(self, 
super_count, query_string, TIMEOUT) def cache(self, timeout=None): qs = self._clone() diff --git a/tests/test_cache.py b/tests/test_cache.py index b2ddebe..5a10736 100644 --- a/tests/test_cache.py +++ b/tests/test_cache.py @@ -20,12 +20,12 @@ class CachingTestCase(ExtraAppTestCase): def setUp(self): cache.clear() - self.old_timeout = getattr(settings, 'CACHE_COUNT_TIMEOUT', None) + self.old_timeout = caching.TIMEOUT if getattr(settings, 'CACHE_MACHINE_USE_REDIS', False): invalidation.redis.flushall() def tearDown(self): - settings.CACHE_COUNT_TIMEOUT = self.old_timeout + caching.TIMEOUT = self.old_timeout def test_flush_key(self): """flush_key should work for objects or strings.""" @@ -145,7 +145,7 @@ def test_raw_cache_params(self): @mock.patch('caching.base.cache') def test_count_cache(self, cache_mock): - settings.CACHE_COUNT_TIMEOUT = 60 + caching.TIMEOUT = 60 cache_mock.scheme = 'memcached' cache_mock.get.return_value = None @@ -159,13 +159,13 @@ def test_count_cache(self, cache_mock): @mock.patch('caching.base.cached') def test_count_none_timeout(self, cached_mock): - settings.CACHE_COUNT_TIMEOUT = None + caching.TIMEOUT = None Addon.objects.count() eq_(cached_mock.call_count, 0) @mock.patch('caching.base.cached') def test_count_nocache(self, cached_mock): - settings.CACHE_COUNT_TIMEOUT = 60 + caching.TIMEOUT = 60 Addon.objects.no_cache().count() eq_(cached_mock.call_count, 0) From dfc9465d04f75cb1738cd581bf9dea00f07bb278 Mon Sep 17 00:00:00 2001 From: Jannis Leidel Date: Fri, 28 Sep 2012 16:50:08 +0200 Subject: [PATCH 039/214] Super tiny cleanup. 
--- caching/backends/memcached.py | 1 - caching/base.py | 1 + caching/invalidation.py | 1 - 3 files changed, 1 insertion(+), 2 deletions(-) diff --git a/caching/backends/memcached.py b/caching/backends/memcached.py index f0b6670..3fd5e2f 100644 --- a/caching/backends/memcached.py +++ b/caching/backends/memcached.py @@ -1,5 +1,4 @@ from django.core.cache.backends import memcached -from django.utils.encoding import smart_str # Add infinite timeout support to the memcached backend. diff --git a/caching/base.py b/caching/base.py index ccec246..03763d8 100644 --- a/caching/base.py +++ b/caching/base.py @@ -135,6 +135,7 @@ def __init__(self, *args, **kw): def flush_key(self): return flush_key(self.query_key()) + def query_key(self): clone = self.query.clone() sql, params = clone.get_compiler(using=self.db).as_sql() diff --git a/caching/invalidation.py b/caching/invalidation.py index bd6e7e4..90c6deb 100644 --- a/caching/invalidation.py +++ b/caching/invalidation.py @@ -64,7 +64,6 @@ def wrapper(*args, **kw): return decorator - class Invalidator(object): def invalidate_keys(self, keys): From ff9f9d1a05345480f3e1c5735aae23683b1ab1b6 Mon Sep 17 00:00:00 2001 From: Jannis Leidel Date: Fri, 28 Sep 2012 17:00:39 +0200 Subject: [PATCH 040/214] Added ability to define an own cache backend in Django >= 1.3. 
--- caching/base.py | 3 +-- caching/invalidation.py | 9 ++++++++- docs/index.rst | 19 +++++++++++++++++++ examples/cache_machine/custom_backend.py | 11 +++++++++++ fabfile.py | 3 ++- tests/test_cache.py | 3 ++- 6 files changed, 43 insertions(+), 5 deletions(-) create mode 100644 examples/cache_machine/custom_backend.py diff --git a/caching/base.py b/caching/base.py index 03763d8..3162910 100644 --- a/caching/base.py +++ b/caching/base.py @@ -2,13 +2,12 @@ import logging from django.conf import settings -from django.core.cache import cache from django.db import models from django.db.models import signals from django.db.models.sql import query from django.utils import encoding -from .invalidation import invalidator, flush_key, make_key, byid +from .invalidation import invalidator, flush_key, make_key, byid, cache class NullHandler(logging.Handler): diff --git a/caching/invalidation.py b/caching/invalidation.py index 90c6deb..00b3126 100644 --- a/caching/invalidation.py +++ b/caching/invalidation.py @@ -5,7 +5,8 @@ import socket from django.conf import settings -from django.core.cache import cache, parse_backend_uri +from django.core.cache import cache as default_cache, get_cache, parse_backend_uri +from django.core.cache.backends.base import InvalidCacheBackendError from django.utils import encoding, translation try: @@ -13,6 +14,12 @@ except ImportError: redislib = None +# Look for an own cache first before falling back to the default cache +try: + cache = get_cache('cache_machine') +except InvalidCacheBackendError: + cache = default_cache + CACHE_PREFIX = getattr(settings, 'CACHE_PREFIX', '') FETCH_BY_ID = getattr(settings, 'FETCH_BY_ID', False) diff --git a/docs/index.rst b/docs/index.rst index 7fbe705..a376c70 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -57,6 +57,25 @@ Note that we have to specify the class, not the module, for the ``BACKEND`` property, and that the ``PREFIX`` is optional. 
The ``LOCATION`` may be a string, instead of a list, if you only have one server. +If you require the default cache backend to be a different type of +backend or want Cache Machine to use a specific caching server simply +define a seperate ``cache_machine`` entry for the ``CACHES`` setting, +e.g.:: + + CACHES = { + 'default': { + 'BACKEND': 'django.core.cache.backends.memcached.CacheClass', + 'LOCATION': 'server-1:11211', + }, + 'cache_machine': { + 'BACKEND': 'caching.backends.memcached.CacheClass', + 'LOCATION': [ + 'server-1:11211', + 'server-2:11211', + ], + 'PREFIX': 'weee:', + }, + } COUNT queries ^^^^^^^^^^^^^ diff --git a/examples/cache_machine/custom_backend.py b/examples/cache_machine/custom_backend.py new file mode 100644 index 0000000..7ecfc1e --- /dev/null +++ b/examples/cache_machine/custom_backend.py @@ -0,0 +1,11 @@ +from settings import * + +CACHES = { + 'default': { + 'BACKEND': 'django.core.cache.backends.locmem.CacheClass', + }, + 'cache_machine': { + 'BACKEND': 'caching.backends.memcached.CacheClass', + 'LOCATION': 'localhost:11211', + }, +} diff --git a/fabfile.py b/fabfile.py index fb6a9d0..a6ed3bd 100644 --- a/fabfile.py +++ b/fabfile.py @@ -28,7 +28,8 @@ def doc(kind='html'): SETTINGS = ('locmem_settings', 'settings', - 'memcache_byid') + 'memcache_byid', + 'custom_backend') try: import redis diff --git a/tests/test_cache.py b/tests/test_cache.py index 5a10736..fa86885 100644 --- a/tests/test_cache.py +++ b/tests/test_cache.py @@ -1,6 +1,5 @@ # -*- coding: utf-8 -*- from django.conf import settings -from django.core.cache import cache from django.utils import translation, encoding import jinja2 @@ -11,6 +10,8 @@ import caching.base as caching from caching import invalidation +cache = invalidation.cache + from testapp.models import Addon, User From bd6039e8f8e4044ae51846efbd1ed47f58881d21 Mon Sep 17 00:00:00 2001 From: Jannis Leidel Date: Fri, 28 Sep 2012 17:00:50 +0200 Subject: [PATCH 041/214] Updated Django requirement to 1.4.1. 
--- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 60f66b1..22be467 100644 --- a/requirements.txt +++ b/requirements.txt @@ -2,7 +2,7 @@ sphinx mock django-nose -django==1.4 +django==1.4.1 python-memcached -e git://github.com/jbalogh/test-utils.git#egg=test-utils fabric From c98734c2225e5604bdb92a112c324276633e5a01 Mon Sep 17 00:00:00 2001 From: Jannis Leidel Date: Fri, 28 Sep 2012 18:04:03 +0200 Subject: [PATCH 042/214] Clarified documentation a bit. --- docs/index.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/index.rst b/docs/index.rst index a376c70..9b0abc6 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -58,9 +58,9 @@ property, and that the ``PREFIX`` is optional. The ``LOCATION`` may be a string, instead of a list, if you only have one server. If you require the default cache backend to be a different type of -backend or want Cache Machine to use a specific caching server simply -define a seperate ``cache_machine`` entry for the ``CACHES`` setting, -e.g.:: +cache backend or want Cache Machine to use specific cache server +options simply define a separate ``cache_machine`` entry for the +``CACHES`` setting, e.g.:: CACHES = { 'default': { From 56f2052b1581772cde472f66b7e00d7799b39caf Mon Sep 17 00:00:00 2001 From: Jannis Leidel Date: Mon, 1 Oct 2012 12:05:12 +0200 Subject: [PATCH 043/214] Bumped up version slightly. --- caching/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/caching/__init__.py b/caching/__init__.py index ccefa86..9584b3b 100644 --- a/caching/__init__.py +++ b/caching/__init__.py @@ -1,2 +1,2 @@ -VERSION = (0, 7.1) +VERSION = (0, "8a1") __version__ = '.'.join(map(str, VERSION)) From d7b21301ca07e8401d3145e416b2f9fac9fe4a24 Mon Sep 17 00:00:00 2001 From: Jannis Leidel Date: Mon, 1 Oct 2012 12:16:17 +0200 Subject: [PATCH 044/214] Added egg-info to list of ignored files. 
--- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index a2220da..46dd20a 100644 --- a/.gitignore +++ b/.gitignore @@ -1,2 +1,3 @@ docs/_build *.py[co] +*.egg-info \ No newline at end of file From a9bbfc0fbc4cd5a87890710cc42e9bab9a1c545d Mon Sep 17 00:00:00 2001 From: Jannis Leidel Date: Mon, 1 Oct 2012 12:23:03 +0200 Subject: [PATCH 045/214] Made the CachingMixin a new-style class to be able to use super properly. --- caching/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/caching/base.py b/caching/base.py index 3162910..b38e126 100644 --- a/caching/base.py +++ b/caching/base.py @@ -220,7 +220,7 @@ def _clone(self, *args, **kw): return qs -class CachingMixin: +class CachingMixin(object): """Inherit from this class to get caching and invalidation helpers.""" def flush_key(self): From 423012437d7ecc4ff361530f72edfc526f4f2a4f Mon Sep 17 00:00:00 2001 From: Jannis Leidel Date: Mon, 1 Oct 2012 12:24:40 +0200 Subject: [PATCH 046/214] Correctly pass version to locmem cache backend. 
--- caching/backends/locmem.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/caching/backends/locmem.py b/caching/backends/locmem.py index ca955f3..35891f4 100644 --- a/caching/backends/locmem.py +++ b/caching/backends/locmem.py @@ -7,12 +7,12 @@ class CacheClass(locmem.CacheClass): def add(self, key, value, timeout=None, version=None): if timeout == 0: timeout = Infinity - return super(CacheClass, self).add(key, value, timeout) + return super(CacheClass, self).add(key, value, timeout, version) def set(self, key, value, timeout=None, version=None): if timeout == 0: timeout = Infinity - return super(CacheClass, self).set(key, value, timeout) + return super(CacheClass, self).set(key, value, timeout, version) class _Infinity(object): From 91c56cd95bf774fccd78b47d70d384e02048278a Mon Sep 17 00:00:00 2001 From: Jannis Leidel Date: Mon, 1 Oct 2012 12:25:57 +0200 Subject: [PATCH 047/214] Sublcass cache backend classes that are not deprecated instead. --- caching/backends/locmem.py | 17 ++++++++++++++--- caching/backends/memcached.py | 19 ++++++++++++++++--- docs/index.rst | 15 +++++++++++---- examples/cache_machine/settings.py | 2 +- 4 files changed, 42 insertions(+), 11 deletions(-) diff --git a/caching/backends/locmem.py b/caching/backends/locmem.py index 35891f4..8787e3b 100644 --- a/caching/backends/locmem.py +++ b/caching/backends/locmem.py @@ -1,18 +1,29 @@ +import django from django.core.cache.backends import locmem # Add infinite timeout support to the locmem backend. Useful for testing. 
-class CacheClass(locmem.CacheClass): +class InfinityMixin(object): def add(self, key, value, timeout=None, version=None): if timeout == 0: timeout = Infinity - return super(CacheClass, self).add(key, value, timeout, version) + return super(InfinityMixin, self).add(key, value, timeout, version) def set(self, key, value, timeout=None, version=None): if timeout == 0: timeout = Infinity - return super(CacheClass, self).set(key, value, timeout, version) + return super(InfinityMixin, self).set(key, value, timeout, version) + + +class CacheClass(InfinityMixin, locmem.CacheClass): + pass + + +if django.VERSION[:2] >= (1, 3): + + class LocMemCache(InfinityMixin, locmem.LocMemCache): + pass class _Infinity(object): diff --git a/caching/backends/memcached.py b/caching/backends/memcached.py index 3fd5e2f..69244a2 100644 --- a/caching/backends/memcached.py +++ b/caching/backends/memcached.py @@ -1,15 +1,28 @@ +import django from django.core.cache.backends import memcached # Add infinite timeout support to the memcached backend. 
-class CacheClass(memcached.CacheClass): +class InfinityMixin(object): def add(self, key, value, timeout=None, version=None): if timeout is None: timeout = self.default_timeout - return super(CacheClass, self).add(key, value, timeout, version) + return super(InfinityMixin, self).add(key, value, timeout, version) def set(self, key, value, timeout=None, version=None): if timeout is None: timeout = self.default_timeout - return super(CacheClass, self).set(key, value, timeout, version) + return super(InfinityMixin, self).set(key, value, timeout, version) + + +class CacheClass(InfinityMixin, memcached.CacheClass): + pass + +if django.VERSION[:2] >= (1, 3): + + class MemcachedCache(InfinityMixin, memcached.MemcachedCache): + pass + + class PyLibMCCache(InfinityMixin, memcached.PyLibMCCache): + pass diff --git a/docs/index.rst b/docs/index.rst index 9b0abc6..f13129d 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -44,7 +44,7 @@ With Django 1.3 or higher, you should use the new ``CACHES`` setting:: CACHES = { 'default': { - 'BACKEND': 'caching.backends.memcached.CacheClass', + 'BACKEND': 'caching.backends.memcached.MemcachedCache', 'LOCATION': [ 'server-1:11211', 'server-2:11211', @@ -64,11 +64,11 @@ options simply define a separate ``cache_machine`` entry for the CACHES = { 'default': { - 'BACKEND': 'django.core.cache.backends.memcached.CacheClass', + 'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache', 'LOCATION': 'server-1:11211', }, 'cache_machine': { - 'BACKEND': 'caching.backends.memcached.CacheClass', + 'BACKEND': 'caching.backends.memcached.MemcachedCache', 'LOCATION': [ 'server-1:11211', 'server-2:11211', @@ -77,6 +77,14 @@ options simply define a separate ``cache_machine`` entry for the }, } +.. note:: + + Cache Machine also supports the other memcache backend support by + Django >= 1.3 based on pylibmbc_: + ``caching.backends.memcached.PyLibMCCache``. + +.. 
_pylibmc: http://sendapatch.se/projects/pylibmc/ + COUNT queries ^^^^^^^^^^^^^ @@ -95,7 +103,6 @@ By default cache machine will not cache empty querysets. To cache them:: CACHE_EMPTY_QUERYSETS = True - Cache Manager ------------- diff --git a/examples/cache_machine/settings.py b/examples/cache_machine/settings.py index 4aa69c4..3ac5a8d 100644 --- a/examples/cache_machine/settings.py +++ b/examples/cache_machine/settings.py @@ -1,6 +1,6 @@ CACHES = { 'default': { - 'BACKEND': 'caching.backends.memcached.CacheClass', + 'BACKEND': 'caching.backends.memcached.MemcachedCache', 'LOCATION': 'localhost:11211', }, } From eb993163d49a3a7fc9c541d8b18e93125ab960cd Mon Sep 17 00:00:00 2001 From: Jannis Leidel Date: Mon, 1 Oct 2012 12:26:09 +0200 Subject: [PATCH 048/214] Added initial Travis configuration. --- .travis.yml | 19 +++++++++++++++++++ requirements.txt | 1 - 2 files changed, 19 insertions(+), 1 deletion(-) create mode 100644 .travis.yml diff --git a/.travis.yml b/.travis.yml new file mode 100644 index 0000000..c3586fd --- /dev/null +++ b/.travis.yml @@ -0,0 +1,19 @@ +language: python +services: + - memcached + - redis-server +python: + - "2.5" + - "2.6" + - "2.7" +before_install: + - export PIP_USE_MIRRORS=true + - export PIP_INDEX_URL=https://simple.crate.io/ +install: + - pip install -e . + - pip install -r requirements.txt Django==$DJANGO +script: + - fab test +env: + - DJANGO=1.3.3 + - DJANGO=1.4.1 diff --git a/requirements.txt b/requirements.txt index 22be467..56a7cc4 100644 --- a/requirements.txt +++ b/requirements.txt @@ -2,7 +2,6 @@ sphinx mock django-nose -django==1.4.1 python-memcached -e git://github.com/jbalogh/test-utils.git#egg=test-utils fabric From 9396b371efb45df4701db35d1fe9717dc81bea06 Mon Sep 17 00:00:00 2001 From: Jannis Leidel Date: Mon, 1 Oct 2012 12:32:00 +0200 Subject: [PATCH 049/214] Also handle ValueError on Django 1.3.x as it doesn't raise the InvalidCacheBackendError correctly. 
--- caching/invalidation.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/caching/invalidation.py b/caching/invalidation.py index 00b3126..b55fe07 100644 --- a/caching/invalidation.py +++ b/caching/invalidation.py @@ -17,7 +17,7 @@ # Look for an own cache first before falling back to the default cache try: cache = get_cache('cache_machine') -except InvalidCacheBackendError: +except (InvalidCacheBackendError, ValueError): cache = default_cache From 0eae83be3792b1857388bc973ce5acb38e8773eb Mon Sep 17 00:00:00 2001 From: Jannis Leidel Date: Mon, 1 Oct 2012 12:36:31 +0200 Subject: [PATCH 050/214] Removed 2.5 from Travis environments as there are clearly features dependingng on using Python > 2.5. --- .travis.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index c3586fd..5031102 100644 --- a/.travis.yml +++ b/.travis.yml @@ -3,7 +3,6 @@ services: - memcached - redis-server python: - - "2.5" - "2.6" - "2.7" before_install: From 4f155cdc40aa32f88c9c05bfbda87ccecad0fb5e Mon Sep 17 00:00:00 2001 From: Jannis Leidel Date: Tue, 13 Nov 2012 17:15:48 +0100 Subject: [PATCH 051/214] Pass model db to queryset class on instantiation. 
--- caching/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/caching/base.py b/caching/base.py index b38e126..c9f9f44 100644 --- a/caching/base.py +++ b/caching/base.py @@ -33,7 +33,7 @@ class CachingManager(models.Manager): use_for_related_fields = True def get_query_set(self): - return CachingQuerySet(self.model) + return CachingQuerySet(self.model, using=self._db) def contribute_to_class(self, cls, name): signals.post_save.connect(self.post_save, sender=cls) From 33fe9696605eae3d1eaaed9374665e6748eab6f2 Mon Sep 17 00:00:00 2001 From: Jeff Balogh Date: Wed, 27 Mar 2013 10:59:06 -0700 Subject: [PATCH 052/214] add travisci and rtfd.org to the readme --- README.rst | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/README.rst b/README.rst index c1670f2..0440bd9 100644 --- a/README.rst +++ b/README.rst @@ -5,13 +5,16 @@ Cache Machine Cache Machine provides automatic caching and invalidation for Django models through the ORM. -For full docs, see http://jbalogh.me/projects/cache-machine. +For full docs, see https://cache-machine.readthedocs.org/en/latest/. + +.. image:: https://travis-ci.org/jbalogh/django-cache-machine.png + :target: https://travis-ci.org/jbalogh/django-cache-machine Requirements ------------ -Cache Machine requires Django 1.3. It was written and tested on Python 2.6. +Cache Machine requires Django 1.3+. It was written and tested on Python 2.6. 
Installation From fd9c7efbd14dfb78f278d7b270cfe7a9da1b79b7 Mon Sep 17 00:00:00 2001 From: Jeff Balogh Date: Wed, 27 Mar 2013 11:02:05 -0700 Subject: [PATCH 053/214] test with django 1.5 on travis --- .travis.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.travis.yml b/.travis.yml index 5031102..b1a3acb 100644 --- a/.travis.yml +++ b/.travis.yml @@ -16,3 +16,4 @@ script: env: - DJANGO=1.3.3 - DJANGO=1.4.1 + - DJANGO=1.5 From 48e56f1a9f5a5d8d6e39927596c4a0eec6665704 Mon Sep 17 00:00:00 2001 From: Tim Gross Date: Thu, 27 Dec 2012 21:14:21 -0500 Subject: [PATCH 054/214] Add tests for multi-DB and remove test-utils dependency --- .gitignore | 2 +- examples/cache_machine/settings.py | 7 ++++++- requirements.txt | 1 - tests/test_cache.py | 26 ++++++++++++++++++++++++-- 4 files changed, 31 insertions(+), 5 deletions(-) diff --git a/.gitignore b/.gitignore index 46dd20a..7782d6a 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,3 @@ docs/_build *.py[co] -*.egg-info \ No newline at end of file +*.egg-info diff --git a/examples/cache_machine/settings.py b/examples/cache_machine/settings.py index 3ac5a8d..b6a43df 100644 --- a/examples/cache_machine/settings.py +++ b/examples/cache_machine/settings.py @@ -11,11 +11,16 @@ 'default': { 'NAME': ':memory:', 'ENGINE': 'django.db.backends.sqlite3', - } + }, + 'slave': { + 'NAME': 'test_slave.db', + 'ENGINE': 'django.db.backends.sqlite3', + } } INSTALLED_APPS = ( 'django_nose', + 'tests.testapp', ) SECRET_KEY = 'ok' diff --git a/requirements.txt b/requirements.txt index 56a7cc4..ed588ee 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,7 +3,6 @@ sphinx mock django-nose python-memcached --e git://github.com/jbalogh/test-utils.git#egg=test-utils fabric jinja2 redis diff --git a/tests/test_cache.py b/tests/test_cache.py index fa86885..0ac793c 100644 --- a/tests/test_cache.py +++ b/tests/test_cache.py @@ -1,12 +1,12 @@ # -*- coding: utf-8 -*- from django.conf import settings +from django.test import TestCase from 
django.utils import translation, encoding import jinja2 import mock from nose.tools import eq_ -from test_utils import ExtraAppTestCase import caching.base as caching from caching import invalidation @@ -15,7 +15,8 @@ from testapp.models import Addon, User -class CachingTestCase(ExtraAppTestCase): +class CachingTestCase(TestCase): + multi_db = True fixtures = ['testapp/test_cache.json'] extra_apps = ['tests.testapp'] @@ -439,3 +440,24 @@ def test_get_flush_lists_none(self, cache_mock): if not getattr(settings, 'CACHE_MACHINE_USE_REDIS', False): cache_mock.return_value.values.return_value = [None, [1]] eq_(caching.invalidator.get_flush_lists(None), set([1])) + + def test_multidb_cache(self): + """ Test where master and slave DB result in two different cache keys """ + assert Addon.objects.get(id=1).from_cache is False + assert Addon.objects.get(id=1).from_cache is True + + from_slave = Addon.objects.using('slave').get(id=1) + assert from_slave.from_cache is False + assert from_slave._state.db == 'slave' + + def test_multidb_fetch_by_id(self): + """ Test where master and slave DB result in two different cache keys with FETCH_BY_ID""" + with self.settings(FETCH_BY_ID=True): + assert Addon.objects.get(id=1).from_cache is False + assert Addon.objects.get(id=1).from_cache is True + + from_slave = Addon.objects.using('slave').get(id=1) + assert from_slave.from_cache is False + assert from_slave._state.db == 'slave' + + From 184e18eb7a0a7c2d579d190280342abff34d10b8 Mon Sep 17 00:00:00 2001 From: Tim Gross Date: Wed, 2 Jan 2013 13:34:31 -0500 Subject: [PATCH 055/214] Remove 5sec timeout in cache invalidation --- caching/invalidation.py | 2 +- tests/test_cache.py | 7 ++++--- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/caching/invalidation.py b/caching/invalidation.py index b55fe07..c9c5977 100644 --- a/caching/invalidation.py +++ b/caching/invalidation.py @@ -80,7 +80,7 @@ def invalidate_keys(self, keys): flush, flush_keys = self.find_flush_lists(keys) if 
flush: - cache.set_many(dict((k, None) for k in flush), 5) + cache.delete_many(flush) if flush_keys: self.clear_flush_lists(flush_keys) diff --git a/tests/test_cache.py b/tests/test_cache.py index 0ac793c..7ca177a 100644 --- a/tests/test_cache.py +++ b/tests/test_cache.py @@ -68,6 +68,10 @@ def test_invalidation(self): a = [x for x in Addon.objects.all() if x.id == 1][0] assert a.from_cache is False + assert Addon.objects.get(id=1).from_cache is True + a = [x for x in Addon.objects.all() if x.id == 1][0] + assert a.from_cache is True + def test_invalidation_cross_locale(self): assert Addon.objects.get(id=1).from_cache is False a = [x for x in Addon.objects.all() if x.id == 1][0] @@ -85,9 +89,6 @@ def test_invalidation_cross_locale(self): assert a.from_cache is True a.save() - assert Addon.objects.get(id=1).from_cache is False - a = [x for x in Addon.objects.all() if x.id == 1][0] - assert a.from_cache is False translation.activate(old_locale) assert Addon.objects.get(id=1).from_cache is False From 38ee62ee5b8c0d114f4b3eb5c4d24c8e7cb2d441 Mon Sep 17 00:00:00 2001 From: Jeff Balogh Date: Fri, 12 Apr 2013 10:06:16 -0700 Subject: [PATCH 056/214] slip the db into obj.cache_key --- caching/base.py | 10 +++++----- caching/invalidation.py | 2 +- tests/test_cache.py | 2 +- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/caching/base.py b/caching/base.py index c9f9f44..3a75bc7 100644 --- a/caching/base.py +++ b/caching/base.py @@ -167,7 +167,7 @@ def fetch_by_id(self): # order_by. 
vals = self.values_list('pk', *self.query.extra.keys()) pks = [val[0] for val in vals] - keys = dict((byid(self.model._cache_key(pk)), pk) for pk in pks) + keys = dict((byid(self.model._cache_key(pk, self.db)), pk) for pk in pks) cached = dict((k, v) for k, v in cache.get_many(keys).items() if v is not None) @@ -229,16 +229,16 @@ def flush_key(self): @property def cache_key(self): """Return a cache key based on the object's primary key.""" - return self._cache_key(self.pk) + return self._cache_key(self.pk, self._state.db) @classmethod - def _cache_key(cls, pk): + def _cache_key(cls, pk, db): """ Return a string that uniquely identifies the object. For the Addon class, with a pk of 2, we get "o:addons.addon:2". """ - key_parts = ('o', cls._meta, pk) + key_parts = ('o', cls._meta, pk, db) return ':'.join(map(encoding.smart_unicode, key_parts)) def _cache_keys(self): @@ -246,7 +246,7 @@ def _cache_keys(self): fks = dict((f, getattr(self, f.attname)) for f in self._meta.fields if isinstance(f, models.ForeignKey)) - keys = [fk.rel.to._cache_key(val) for fk, val in fks.items() + keys = [fk.rel.to._cache_key(val, self._state.db) for fk, val in fks.items() if val is not None and hasattr(fk.rel.to, '_cache_key')] return (self.cache_key,) + tuple(keys) diff --git a/caching/invalidation.py b/caching/invalidation.py index c9c5977..17e8c21 100644 --- a/caching/invalidation.py +++ b/caching/invalidation.py @@ -45,7 +45,7 @@ def flush_key(obj): def byid(obj): - key = obj if isinstance(obj, basestring) else obj._cache_key(obj.pk) + key = obj if isinstance(obj, basestring) else obj.cache_key return make_key('byid:' + key) diff --git a/tests/test_cache.py b/tests/test_cache.py index 7ca177a..491bdc3 100644 --- a/tests/test_cache.py +++ b/tests/test_cache.py @@ -36,7 +36,7 @@ def test_flush_key(self): def test_cache_key(self): a = Addon.objects.get(id=1) - eq_(a.cache_key, 'o:testapp.addon:1') + eq_(a.cache_key, 'o:testapp.addon:1:default') keys = set((a.cache_key, 
a.author1.cache_key, a.author2.cache_key)) eq_(set(a._cache_keys()), keys) From 7b4762eccbac4eb469ff1d4e52503a9509f902d7 Mon Sep 17 00:00:00 2001 From: Jeff Balogh Date: Fri, 12 Apr 2013 10:50:42 -0700 Subject: [PATCH 057/214] backport TestCase.settings for Django 1.3 --- tests/test_cache.py | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/tests/test_cache.py b/tests/test_cache.py index 491bdc3..b41dead 100644 --- a/tests/test_cache.py +++ b/tests/test_cache.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- +import django from django.conf import settings from django.test import TestCase from django.utils import translation, encoding @@ -14,6 +15,22 @@ from testapp.models import Addon, User +if django.get_version().startswith('1.3'): + class settings_patch(object): + def __init__(self, **kwargs): + self.options = kwargs + + def __enter__(self): + self._old_settings = dict((k, getattr(settings, k, None)) for k in self.options) + for k, v in self.options.items(): + setattr(settings, k, v) + + def __exit__(self, *args): + for k in self.options: + setattr(settings, k, self._old_settings[k]) + + TestCase.settings = settings_patch + class CachingTestCase(TestCase): multi_db = True From 4955975a2eb3e211602b71652beb6accd883efb8 Mon Sep 17 00:00:00 2001 From: Jeff Balogh Date: Fri, 3 May 2013 10:56:55 -0700 Subject: [PATCH 058/214] bump to 0.8 --- caching/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/caching/__init__.py b/caching/__init__.py index 9584b3b..00a5e09 100644 --- a/caching/__init__.py +++ b/caching/__init__.py @@ -1,2 +1,2 @@ -VERSION = (0, "8a1") +VERSION = (0, '8') __version__ = '.'.join(map(str, VERSION)) From e8011a33df1168a35bf0a3715c86831377e2c6de Mon Sep 17 00:00:00 2001 From: Jason Normore Date: Tue, 14 May 2013 20:11:10 -0400 Subject: [PATCH 059/214] Adds timeout option for raw query --- caching/base.py | 17 +++++++++++++---- tests/test_cache.py | 10 ++++++++++ 2 files changed, 23 insertions(+), 4 deletions(-) 
diff --git a/caching/base.py b/caching/base.py index 3a75bc7..51c123e 100644 --- a/caching/base.py +++ b/caching/base.py @@ -253,12 +253,21 @@ def _cache_keys(self): class CachingRawQuerySet(models.query.RawQuerySet): + def __init__(self, *args, **kw): + timeout = kw.pop('timeout', None) + super(CachingRawQuerySet, self).__init__(*args, **kw) + self.timeout = timeout + def __iter__(self): iterator = super(CachingRawQuerySet, self).__iter__ - sql = self.raw_query % tuple(self.params) - for obj in CacheMachine(sql, iterator): - yield obj - raise StopIteration + if self.timeout == NO_CACHE: + iterator = iterator() + while True: yield iterator.next() + else: + sql = self.raw_query % tuple(self.params) + for obj in CacheMachine(sql, iterator, timeout=self.timeout): + yield obj + raise StopIteration def _function_cache_key(key): diff --git a/tests/test_cache.py b/tests/test_cache.py index b41dead..c61f1f5 100644 --- a/tests/test_cache.py +++ b/tests/test_cache.py @@ -163,6 +163,16 @@ def test_raw_cache_params(self): raw2 = list(Addon.objects.raw(sql, [2]))[0] eq_(raw2.id, 2) + @mock.patch('caching.base.CacheMachine') + def test_raw_nocache(self, CacheMachine): + caching.TIMEOUT = 60 + sql = 'SELECT * FROM %s WHERE id = 1' % Addon._meta.db_table + raw = list(Addon.objects.raw(sql, timeout=-1)) + eq_(len(raw), 1) + raw_addon = raw[0] + assert not hasattr(raw_addon, 'from_cache') + assert not CacheMachine.called + @mock.patch('caching.base.cache') def test_count_cache(self, cache_mock): caching.TIMEOUT = 60 From 26d88cf0a0c918f78df7c8403b0384b7998af953 Mon Sep 17 00:00:00 2001 From: Tim Dawborn Date: Sat, 9 Nov 2013 21:31:56 +1100 Subject: [PATCH 060/214] Ported to support Django 1.6, in which the underlying caching API has changed. See https://docs.djangoproject.com/en/dev/releases/1.6/#miscellaneous. To support both Django 1.6 and < 1.6 simultaniously, a new constant, DEFAULT_TIMEOUT, was added to replace the use of the None timeout value. 
This is consistent with how caching works in Django 1.6. Part of this porting also fixes #44. Updated the documentation accordingly, and added some points on changing timeout values, including NO_CACHE. --- caching/backends/locmem.py | 46 +++++++------ caching/backends/memcached.py | 20 +++--- caching/base.py | 23 +++---- caching/compat.py | 12 ++++ docs/index.rst | 82 +++++++++++++++--------- examples/cache_machine/custom_backend.py | 4 +- tests/test_cache.py | 37 +++++------ 7 files changed, 129 insertions(+), 95 deletions(-) create mode 100644 caching/compat.py diff --git a/caching/backends/locmem.py b/caching/backends/locmem.py index 8787e3b..db0a0ad 100644 --- a/caching/backends/locmem.py +++ b/caching/backends/locmem.py @@ -1,17 +1,38 @@ import django from django.core.cache.backends import locmem +from caching.compat import DEFAULT_TIMEOUT, FOREVER + + +if django.VERSION[:2] >= (1, 6): + Infinity = FOREVER +else: + class _Infinity(object): + """Always compares greater than numbers.""" + + def __radd__(self, _): + return self + + def __cmp__(self, o): + return 0 if self is o else 1 + + def __repr__(self): + return 'Infinity' + + Infinity = _Infinity() + del _Infinity + # Add infinite timeout support to the locmem backend. Useful for testing. 
class InfinityMixin(object): - def add(self, key, value, timeout=None, version=None): - if timeout == 0: + def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None): + if timeout == FOREVER: timeout = Infinity return super(InfinityMixin, self).add(key, value, timeout, version) - def set(self, key, value, timeout=None, version=None): - if timeout == 0: + def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None): + if timeout == FOREVER: timeout = Infinity return super(InfinityMixin, self).set(key, value, timeout, version) @@ -21,22 +42,5 @@ class CacheClass(InfinityMixin, locmem.CacheClass): if django.VERSION[:2] >= (1, 3): - class LocMemCache(InfinityMixin, locmem.LocMemCache): pass - - -class _Infinity(object): - """Always compares greater than numbers.""" - - def __radd__(self, _): - return self - - def __cmp__(self, o): - return 0 if self is o else 1 - - def __repr__(self): - return 'Infinity' - -Infinity = _Infinity() -del _Infinity diff --git a/caching/backends/memcached.py b/caching/backends/memcached.py index 69244a2..55a59b2 100644 --- a/caching/backends/memcached.py +++ b/caching/backends/memcached.py @@ -1,28 +1,28 @@ import django from django.core.cache.backends import memcached +from caching.compat import DEFAULT_TIMEOUT + # Add infinite timeout support to the memcached backend. 
class InfinityMixin(object): - def add(self, key, value, timeout=None, version=None): - if timeout is None: - timeout = self.default_timeout + def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None): return super(InfinityMixin, self).add(key, value, timeout, version) - def set(self, key, value, timeout=None, version=None): - if timeout is None: - timeout = self.default_timeout + def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None): return super(InfinityMixin, self).set(key, value, timeout, version) -class CacheClass(InfinityMixin, memcached.CacheClass): - pass - if django.VERSION[:2] >= (1, 3): - class MemcachedCache(InfinityMixin, memcached.MemcachedCache): pass class PyLibMCCache(InfinityMixin, memcached.PyLibMCCache): pass + + class CacheClass(MemcachedCache): + pass +else: + class CacheClass(InfinityMixin, memcached.CacheClass): + pass diff --git a/caching/base.py b/caching/base.py index 51c123e..3c772e3 100644 --- a/caching/base.py +++ b/caching/base.py @@ -7,6 +7,7 @@ from django.db.models.sql import query from django.utils import encoding +from .compat import DEFAULT_TIMEOUT, FOREVER from .invalidation import invalidator, flush_key, make_key, byid, cache @@ -19,12 +20,11 @@ def emit(self, record): log = logging.getLogger('caching') log.addHandler(NullHandler()) -FOREVER = 0 NO_CACHE = -1 CACHE_PREFIX = getattr(settings, 'CACHE_PREFIX', '') FETCH_BY_ID = getattr(settings, 'FETCH_BY_ID', False) CACHE_EMPTY_QUERYSETS = getattr(settings, 'CACHE_EMPTY_QUERYSETS', False) -TIMEOUT = getattr(settings, 'CACHE_COUNT_TIMEOUT', None) +TIMEOUT = getattr(settings, 'CACHE_COUNT_TIMEOUT', DEFAULT_TIMEOUT) class CachingManager(models.Manager): @@ -55,7 +55,7 @@ def raw(self, raw_query, params=None, *args, **kwargs): return CachingRawQuerySet(raw_query, self.model, params=params, using=self._db, *args, **kwargs) - def cache(self, timeout=None): + def cache(self, timeout=DEFAULT_TIMEOUT): return self.get_query_set().cache(timeout) def no_cache(self): @@ 
-70,7 +70,7 @@ class CacheMachine(object): called to get an iterator over some database results. """ - def __init__(self, query_string, iter_function, timeout=None, db='default'): + def __init__(self, query_string, iter_function, timeout=DEFAULT_TIMEOUT, db='default'): self.query_string = query_string self.iter_function = iter_function self.timeout = timeout @@ -130,7 +130,7 @@ class CachingQuerySet(models.query.QuerySet): def __init__(self, *args, **kw): super(CachingQuerySet, self).__init__(*args, **kw) - self.timeout = None + self.timeout = DEFAULT_TIMEOUT def flush_key(self): return flush_key(self.query_key()) @@ -201,12 +201,12 @@ def fetch_missed(self, pks): def count(self): super_count = super(CachingQuerySet, self).count query_string = 'count:%s' % self.query_key() - if self.timeout == NO_CACHE or TIMEOUT is None: + if self.timeout == NO_CACHE or TIMEOUT == NO_CACHE: return super_count() else: return cached_with(self, super_count, query_string, TIMEOUT) - def cache(self, timeout=None): + def cache(self, timeout=DEFAULT_TIMEOUT): qs = self._clone() qs.timeout = timeout return qs @@ -254,7 +254,7 @@ def _cache_keys(self): class CachingRawQuerySet(models.query.RawQuerySet): def __init__(self, *args, **kw): - timeout = kw.pop('timeout', None) + timeout = kw.pop('timeout', DEFAULT_TIMEOUT) super(CachingRawQuerySet, self).__init__(*args, **kw) self.timeout = timeout @@ -262,7 +262,8 @@ def __iter__(self): iterator = super(CachingRawQuerySet, self).__iter__ if self.timeout == NO_CACHE: iterator = iterator() - while True: yield iterator.next() + while True: + yield iterator.next() else: sql = self.raw_query % tuple(self.params) for obj in CacheMachine(sql, iterator, timeout=self.timeout): @@ -274,7 +275,7 @@ def _function_cache_key(key): return make_key('f:%s' % key, with_locale=True) -def cached(function, key_, duration=None): +def cached(function, key_, duration=DEFAULT_TIMEOUT): """Only calls the function if ``key`` is not already in the cache.""" key = 
_function_cache_key(key_) val = cache.get(key) @@ -287,7 +288,7 @@ def cached(function, key_, duration=None): return val -def cached_with(obj, f, f_key, timeout=None): +def cached_with(obj, f, f_key, timeout=DEFAULT_TIMEOUT): """Helper for caching a function call within an object's flush list.""" try: obj_key = (obj.query_key() if hasattr(obj, 'query_key') diff --git a/caching/compat.py b/caching/compat.py new file mode 100644 index 0000000..3f5c7de --- /dev/null +++ b/caching/compat.py @@ -0,0 +1,12 @@ +import django + +__all__ = ['DEFAULT_TIMEOUT', 'FOREVER'] + + +if django.VERSION[:2] >= (1, 6): + from django.core.cache.backends.base import DEFAULT_TIMEOUT as DJANGO_DEFAULT_TIMEOUT + DEFAULT_TIMEOUT = DJANGO_DEFAULT_TIMEOUT + FOREVER = None +else: + DEFAULT_TIMEOUT = None + FOREVER = 0 diff --git a/docs/index.rst b/docs/index.rst index f13129d..64f54c2 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -12,35 +12,15 @@ Settings -------- Before we start, you'll have to update your ``settings.py`` to use one of the -caching backends provided by Cache Machine. Django's built-in caching backends -don't allow infinite cache timeouts, which are critical for doing invalidation -(see below). Cache Machine extends the ``locmem`` and ``memcached`` backends -provided by Django to enable indefinite caching when a timeout of ``0`` is +caching backends provided by Cache Machine. Prior to Django 1.6, Django's +built-in caching backends did not allow for infinite cache timeouts, +which are critical for doing invalidation (see below). Cache Machine extends +the ``locmem`` and ``memcached`` backends provided by Django to enable +indefinite caching when a timeout of ``caching.base.FOREVER`` is passed. If you were already using one of these backends, you can probably go -on using them just as you were. If you were caching things with a timeout of -``0``, there will be problems with those entities now getting cached forever. -You shouldn't have been doing that anyways. 
- -For memcached:: - - CACHE_BACKEND = 'caching.backends.memcached://localhost:11211' - -For locmem (only recommended for testing):: - - CACHE_BACKEND = 'caching.backends.locmem://' - -Cache Machine will not work properly with the file or database cache backends. - -If you want to set a prefix for all keys in Cache Machine, define -``CACHE_PREFIX`` in settings.py:: - - CACHE_PREFIX = 'weee:' +on using them just as you were. - -Django 1.3 -^^^^^^^^^^ - -With Django 1.3 or higher, you should use the new ``CACHES`` setting:: +With Django 1.3 or higher, you should use the ``CACHES`` setting:: CACHES = { 'default': { @@ -74,7 +54,7 @@ options simply define a separate ``cache_machine`` entry for the 'server-2:11211', ], 'PREFIX': 'weee:', - }, + }, } .. note:: @@ -85,6 +65,25 @@ options simply define a separate ``cache_machine`` entry for the .. _pylibmc: http://sendapatch.se/projects/pylibmc/ + +Prior to Django 1.3 +^^^^^^^^^^^^^^^^^^^ + +For memcached:: + + CACHE_BACKEND = 'caching.backends.memcached://localhost:11211' + +For locmem (only recommended for testing):: + + CACHE_BACKEND = 'caching.backends.locmem://' + +Cache Machine will not work properly with the file or database cache backends. + +If you want to set a prefix for all keys in Cache Machine, define +``CACHE_PREFIX`` in settings.py:: + + CACHE_PREFIX = 'weee:' + COUNT queries ^^^^^^^^^^^^^ @@ -96,6 +95,10 @@ short enough that stale counts won't be a big deal. :: CACHE_COUNT_TIMEOUT = 60 # seconds, not too long. +By default, calls to ``QuerySet.count()`` are not cached. They are only cached +if ``CACHE_COUNT_TIMEOUT`` is set to a value other than +``caching.base.NO_CACHE``. 
+ Empty querysets ^^^^^^^^^^^^^^^ @@ -118,12 +121,12 @@ Here's what a minimal cached model looks like:: from django.db import models - import caching.base + from caching.base imoprt CachingManager, CachingMixin - class Zomg(caching.base.CachingMixin, models.Model): + class Zomg(CachingMixin, models.Model): val = models.IntegerField() - objects = caching.base.CachingManager() + objects = CachingManager() Whenever you run a query, ``CachingQuerySet`` will try to find that query in the cache. Queries are keyed by ``{prefix}:{sql}``. If it's there, we return @@ -162,6 +165,23 @@ The foundations of this module were derived from `Mike Malone's`_ .. _`Mike Malone's`: http://immike.net/ .. _django-caching: http://github.com/mmalone/django-caching/ +Changing the timeout of a CachingQuerySet instance +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +By default, the timeout for a ``CachingQuerySet`` instance will be the timeout +of the underlying cache being used by Cache Machine. To change the timeout of +a ``CachingQuerySet`` instance, you can assign a different value to the +``timeout`` attribute which represents the number of seconds to cache for + +For example:: + + def get_objects(name): + qs = CachedClass.objects.filter(name=name) + qs.timeout = 5 # seconds + return qs + +To disable caching for a particular ``CachingQuerySet`` instance, set the +``timeout`` attribute to ``caching.base.NO_CACHE``. 
Manual Caching -------------- diff --git a/examples/cache_machine/custom_backend.py b/examples/cache_machine/custom_backend.py index 7ecfc1e..5148b82 100644 --- a/examples/cache_machine/custom_backend.py +++ b/examples/cache_machine/custom_backend.py @@ -2,10 +2,10 @@ CACHES = { 'default': { - 'BACKEND': 'django.core.cache.backends.locmem.CacheClass', + 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', }, 'cache_machine': { - 'BACKEND': 'caching.backends.memcached.CacheClass', + 'BACKEND': 'caching.backends.memcached.MemcachedCache', 'LOCATION': 'localhost:11211', }, } diff --git a/tests/test_cache.py b/tests/test_cache.py index c61f1f5..27601cf 100644 --- a/tests/test_cache.py +++ b/tests/test_cache.py @@ -8,8 +8,7 @@ import mock from nose.tools import eq_ -import caching.base as caching -from caching import invalidation +from caching import base, invalidation cache = invalidation.cache @@ -34,22 +33,22 @@ def __exit__(self, *args): class CachingTestCase(TestCase): multi_db = True - fixtures = ['testapp/test_cache.json'] + fixtures = ['tests/testapp/fixtures/testapp/test_cache.json'] extra_apps = ['tests.testapp'] def setUp(self): cache.clear() - self.old_timeout = caching.TIMEOUT + self.old_timeout = base.TIMEOUT if getattr(settings, 'CACHE_MACHINE_USE_REDIS', False): invalidation.redis.flushall() def tearDown(self): - caching.TIMEOUT = self.old_timeout + base.TIMEOUT = self.old_timeout def test_flush_key(self): """flush_key should work for objects or strings.""" a = Addon.objects.get(id=1) - eq_(caching.flush_key(a.cache_key), caching.flush_key(a)) + eq_(base.flush_key(a.cache_key), base.flush_key(a)) def test_cache_key(self): a = Addon.objects.get(id=1) @@ -165,9 +164,9 @@ def test_raw_cache_params(self): @mock.patch('caching.base.CacheMachine') def test_raw_nocache(self, CacheMachine): - caching.TIMEOUT = 60 + base.TIMEOUT = 60 sql = 'SELECT * FROM %s WHERE id = 1' % Addon._meta.db_table - raw = list(Addon.objects.raw(sql, timeout=-1)) + raw = 
list(Addon.objects.raw(sql, timeout=base.NO_CACHE)) eq_(len(raw), 1) raw_addon = raw[0] assert not hasattr(raw_addon, 'from_cache') @@ -175,7 +174,7 @@ def test_raw_nocache(self, CacheMachine): @mock.patch('caching.base.cache') def test_count_cache(self, cache_mock): - caching.TIMEOUT = 60 + base.TIMEOUT = 60 cache_mock.scheme = 'memcached' cache_mock.get.return_value = None @@ -189,13 +188,13 @@ def test_count_cache(self, cache_mock): @mock.patch('caching.base.cached') def test_count_none_timeout(self, cached_mock): - caching.TIMEOUT = None + base.TIMEOUT = base.NO_CACHE Addon.objects.count() eq_(cached_mock.call_count, 0) @mock.patch('caching.base.cached') def test_count_nocache(self, cached_mock): - caching.TIMEOUT = 60 + base.TIMEOUT = 60 Addon.objects.no_cache().count() eq_(cached_mock.call_count, 0) @@ -203,7 +202,7 @@ def test_queryset_flush_list(self): """Check that we're making a flush list for the queryset.""" q = Addon.objects.all() objects = list(q) # Evaluate the queryset so it gets cached. - caching.invalidator.add_to_flush_list({q.flush_key(): ['remove-me']}) + base.invalidator.add_to_flush_list({q.flush_key(): ['remove-me']}) cache.set('remove-me', 15) Addon.objects.invalidate(objects[0]) @@ -299,7 +298,7 @@ def expensive(): return counter.call_count a = Addon.objects.get(id=1) - f = lambda: caching.cached_with(a, expensive, 'key') + f = lambda: base.cached_with(a, expensive, 'key') # Only gets called once. eq_(f(), 1) @@ -319,7 +318,7 @@ def expensive(): counter.reset_mock() q = Addon.objects.filter(id=1) - f = lambda: caching.cached_with(q, expensive, 'key') + f = lambda: base.cached_with(q, expensive, 'key') # Only gets called once. 
eq_(f(), 1) @@ -337,7 +336,7 @@ def f(): counter() return counter.call_count - eq_(caching.cached_with([], f, 'key'), 1) + eq_(base.cached_with([], f, 'key'), 1) def test_cached_with_unicode(self): u = ':'.join(map(encoding.smart_str, [u'תיאור אוסף'])) @@ -345,7 +344,7 @@ def test_cached_with_unicode(self): obj.query_key.return_value = u'xxx' obj.flush_key.return_value = 'key' f = lambda: 1 - eq_(caching.cached_with(obj, f, 'adf:%s' % u), 1) + eq_(base.cached_with(obj, f, 'adf:%s' % u), 1) def test_cached_method(self): a = Addon.objects.get(id=1) @@ -460,14 +459,14 @@ def test_make_key_unicode(self): translation.activate(u'en-US') f = 'fragment\xe9\x9b\xbb\xe8\x85\xa6\xe7\x8e' # This would crash with a unicode error. - caching.make_key(f, with_locale=True) + base.make_key(f, with_locale=True) translation.deactivate() @mock.patch('caching.invalidation.cache.get_many') def test_get_flush_lists_none(self, cache_mock): if not getattr(settings, 'CACHE_MACHINE_USE_REDIS', False): cache_mock.return_value.values.return_value = [None, [1]] - eq_(caching.invalidator.get_flush_lists(None), set([1])) + eq_(base.invalidator.get_flush_lists(None), set([1])) def test_multidb_cache(self): """ Test where master and slave DB result in two different cache keys """ @@ -487,5 +486,3 @@ def test_multidb_fetch_by_id(self): from_slave = Addon.objects.using('slave').get(id=1) assert from_slave.from_cache is False assert from_slave._state.db == 'slave' - - From f9022f2c74f9cc3ea267b83144d4accf7193f6a4 Mon Sep 17 00:00:00 2001 From: Tim Dawborn Date: Sat, 9 Nov 2013 21:40:05 +1100 Subject: [PATCH 061/214] Corrected incorrect default value for CACHE_COUNT_TIMEOUT. 
--- caching/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/caching/base.py b/caching/base.py index 3c772e3..5292d19 100644 --- a/caching/base.py +++ b/caching/base.py @@ -24,7 +24,7 @@ def emit(self, record): CACHE_PREFIX = getattr(settings, 'CACHE_PREFIX', '') FETCH_BY_ID = getattr(settings, 'FETCH_BY_ID', False) CACHE_EMPTY_QUERYSETS = getattr(settings, 'CACHE_EMPTY_QUERYSETS', False) -TIMEOUT = getattr(settings, 'CACHE_COUNT_TIMEOUT', DEFAULT_TIMEOUT) +TIMEOUT = getattr(settings, 'CACHE_COUNT_TIMEOUT', NO_CACHE) class CachingManager(models.Manager): From 978a14053726e92c5dc27d48d92de8e0b9dba2e3 Mon Sep 17 00:00:00 2001 From: Tim Dawborn Date: Sat, 16 Nov 2013 16:17:23 +1100 Subject: [PATCH 062/214] Updated travis config for 1.6. --- .travis.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.travis.yml b/.travis.yml index b1a3acb..23b165f 100644 --- a/.travis.yml +++ b/.travis.yml @@ -17,3 +17,4 @@ env: - DJANGO=1.3.3 - DJANGO=1.4.1 - DJANGO=1.5 + - DJANGO=1.6 From c8da260b477646aaa6896ef7936cdf36e4f1b4c8 Mon Sep 17 00:00:00 2001 From: Allen Short Date: Thu, 12 Dec 2013 15:01:24 -0800 Subject: [PATCH 063/214] django 1.6 removes `dup_select_related` --- caching/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/caching/base.py b/caching/base.py index 5292d19..32876e0 100644 --- a/caching/base.py +++ b/caching/base.py @@ -195,7 +195,7 @@ def fetch_missed(self, pks): if hasattr(others, 'no_cache'): others = others.no_cache() if self.query.select_related: - others.dup_select_related(self) + others.query.select_related = self.query.select_related return others def count(self): From 925e4aa3886dc41fdd5575dd5de9f24261c3f361 Mon Sep 17 00:00:00 2001 From: Iain Dawson Date: Thu, 6 Feb 2014 16:39:21 +0000 Subject: [PATCH 064/214] Fix typo of 'import'. 
--- docs/index.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/index.rst b/docs/index.rst index 64f54c2..78e1a46 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -121,7 +121,7 @@ Here's what a minimal cached model looks like:: from django.db import models - from caching.base imoprt CachingManager, CachingMixin + from caching.base import CachingManager, CachingMixin class Zomg(CachingMixin, models.Model): val = models.IntegerField() From 711108c148549e8c074aff335601265ea2bd7499 Mon Sep 17 00:00:00 2001 From: Iain Dawson Date: Thu, 6 Feb 2014 16:39:37 +0000 Subject: [PATCH 065/214] Correct references to Django's KEY_PREFIX setting. --- docs/index.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/index.rst b/docs/index.rst index 78e1a46..c4c6a51 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -29,12 +29,12 @@ With Django 1.3 or higher, you should use the ``CACHES`` setting:: 'server-1:11211', 'server-2:11211', ], - 'PREFIX': 'weee:', + 'KEY_PREFIX': 'weee:', }, } Note that we have to specify the class, not the module, for the ``BACKEND`` -property, and that the ``PREFIX`` is optional. The ``LOCATION`` may be a +property, and that the ``KEY_PREFIX`` is optional. The ``LOCATION`` may be a string, instead of a list, if you only have one server. 
If you require the default cache backend to be a different type of @@ -53,7 +53,7 @@ options simply define a separate ``cache_machine`` entry for the 'server-1:11211', 'server-2:11211', ], - 'PREFIX': 'weee:', + 'KEY_PREFIX': 'weee:', }, } From 74e1b82bd6bf2472d8725c270672b2402da19b8f Mon Sep 17 00:00:00 2001 From: Allen Short Date: Fri, 3 Jan 2014 12:33:54 -0800 Subject: [PATCH 066/214] handle EmptyResultSet --- caching/base.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/caching/base.py b/caching/base.py index 32876e0..53ee3d0 100644 --- a/caching/base.py +++ b/caching/base.py @@ -4,7 +4,7 @@ from django.conf import settings from django.db import models from django.db.models import signals -from django.db.models.sql import query +from django.db.models.sql import query, EmptyResultSet from django.utils import encoding from .compat import DEFAULT_TIMEOUT, FOREVER @@ -200,7 +200,10 @@ def fetch_missed(self, pks): def count(self): super_count = super(CachingQuerySet, self).count - query_string = 'count:%s' % self.query_key() + try: + query_string = 'count:%s' % self.query_key() + except query.EmptyResultSet: + return 0 if self.timeout == NO_CACHE or TIMEOUT == NO_CACHE: return super_count() else: @@ -290,10 +293,11 @@ def cached(function, key_, duration=DEFAULT_TIMEOUT): def cached_with(obj, f, f_key, timeout=DEFAULT_TIMEOUT): """Helper for caching a function call within an object's flush list.""" + try: obj_key = (obj.query_key() if hasattr(obj, 'query_key') else obj.cache_key) - except AttributeError: + except (AttributeError, EmptyResultSet): log.warning(u'%r cannot be cached.' 
% encoding.smart_str(obj)) return f() From b8ede9c09686f050ca8ea6a87575c66c175ee4e4 Mon Sep 17 00:00:00 2001 From: Reza Mohammadi Date: Thu, 17 Jul 2014 16:34:55 +0430 Subject: [PATCH 067/214] Travis config update * Dropping mirror related pip env variables * Dropping test of unsupported Django 1.3 * Adding Django 1.7 RC1 to test environments --- .travis.yml | 13 +++++-------- 1 file changed, 5 insertions(+), 8 deletions(-) diff --git a/.travis.yml b/.travis.yml index 23b165f..247bab1 100644 --- a/.travis.yml +++ b/.travis.yml @@ -5,16 +5,13 @@ services: python: - "2.6" - "2.7" -before_install: - - export PIP_USE_MIRRORS=true - - export PIP_INDEX_URL=https://simple.crate.io/ install: - pip install -e . - - pip install -r requirements.txt Django==$DJANGO + - pip install -r requirements.txt "$DJANGO_SPEC" script: - fab test env: - - DJANGO=1.3.3 - - DJANGO=1.4.1 - - DJANGO=1.5 - - DJANGO=1.6 + - DJANGO_SPEC="Django>=1.4,<1.5" + - DJANGO_SPEC="Django>=1.5,<1.6" + - DJANGO_SPEC="Django>=1.6,<1.7" + - DJANGO_SPEC="https://www.djangoproject.com/download/1.7c1/tarball/" From 6ef1b5bfb31c26445034253e7025ba98b41f9cba Mon Sep 17 00:00:00 2001 From: Jeremiah Orem Date: Fri, 11 Jul 2014 13:50:58 -0700 Subject: [PATCH 068/214] change default redis db from 1 to 0 This will prevent redis-py from sending SELECT calls on each connection and make the library compatible, by default, with twemproxy. --- caching/invalidation.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/caching/invalidation.py b/caching/invalidation.py index 17e8c21..b004675 100644 --- a/caching/invalidation.py +++ b/caching/invalidation.py @@ -185,11 +185,11 @@ def get_redis_backend(): """Connect to redis from a string like CACHE_BACKEND.""" # From django-redis-cache. 
_, server, params = parse_backend_uri(settings.REDIS_BACKEND) - db = params.pop('db', 1) + db = params.pop('db', 0) try: db = int(db) except (ValueError, TypeError): - db = 1 + db = 0 try: socket_timeout = float(params.pop('socket_timeout')) except (KeyError, ValueError): From 5ab6389277e573af28ee9c0990df8b92a9055dfd Mon Sep 17 00:00:00 2001 From: Reza Mohammadi Date: Thu, 17 Jul 2014 17:44:30 +0430 Subject: [PATCH 069/214] Compatible with Django 1.7 `django.core.cache.parse_backend_uri` was dropped in [1], so the code is now included. [1]: https://github.com/django/django/commit/4e9f800742c3048402a --- caching/invalidation.py | 30 ++++++++++++++++++++++++++++-- 1 file changed, 28 insertions(+), 2 deletions(-) diff --git a/caching/invalidation.py b/caching/invalidation.py index b004675..8b185ac 100644 --- a/caching/invalidation.py +++ b/caching/invalidation.py @@ -5,7 +5,7 @@ import socket from django.conf import settings -from django.core.cache import cache as default_cache, get_cache, parse_backend_uri +from django.core.cache import cache as default_cache, get_cache from django.core.cache.backends.base import InvalidCacheBackendError from django.utils import encoding, translation @@ -181,10 +181,36 @@ def add_to_flush_list(self, mapping): return +def parse_backend_uri(backend_uri): + """ + Converts the "backend_uri" into a host and any extra params that are + required for the backend. Returns a (host, params) tuple. 
+ """ + backend_uri_sliced = backend_uri.split('://') + if len(backend_uri_sliced) > 2: + raise InvalidCacheBackendError("Backend URI can't have more than one scheme://") + elif len(backend_uri_sliced) == 2: + rest = backend_uri_sliced[1] + else: + rest = backend_uri_sliced[0] + + host = rest[2:] + qpos = rest.find('?') + if qpos != -1: + params = dict(parse_qsl(rest[qpos+1:])) + host = rest[2:qpos] + else: + params = {} + if host.endswith('/'): + host = host[:-1] + + return host, params + + def get_redis_backend(): """Connect to redis from a string like CACHE_BACKEND.""" # From django-redis-cache. - _, server, params = parse_backend_uri(settings.REDIS_BACKEND) + server, params = parse_backend_uri(settings.REDIS_BACKEND) db = params.pop('db', 0) try: db = int(db) From d017dd810426b3060fbd37cfec8de87d27921628 Mon Sep 17 00:00:00 2001 From: Reza Mohammadi Date: Thu, 17 Jul 2014 18:55:52 +0430 Subject: [PATCH 070/214] Now should pass all the travis tests + Removing Django 1.3 related stuff from documentation --- docs/index.rst | 23 ++--------------------- fabfile.py | 9 ++++++++- 2 files changed, 10 insertions(+), 22 deletions(-) diff --git a/docs/index.rst b/docs/index.rst index 64f54c2..d54dc91 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -20,7 +20,7 @@ indefinite caching when a timeout of ``caching.base.FOREVER`` is passed. If you were already using one of these backends, you can probably go on using them just as you were. -With Django 1.3 or higher, you should use the ``CACHES`` setting:: +With Django 1.4 or higher, you should use the ``CACHES`` setting:: CACHES = { 'default': { @@ -60,30 +60,11 @@ options simply define a separate ``cache_machine`` entry for the .. note:: Cache Machine also supports the other memcache backend support by - Django >= 1.3 based on pylibmbc_: + Django >= 1.4 based on pylibmbc_: ``caching.backends.memcached.PyLibMCCache``. .. 
_pylibmc: http://sendapatch.se/projects/pylibmc/ - -Prior to Django 1.3 -^^^^^^^^^^^^^^^^^^^ - -For memcached:: - - CACHE_BACKEND = 'caching.backends.memcached://localhost:11211' - -For locmem (only recommended for testing):: - - CACHE_BACKEND = 'caching.backends.locmem://' - -Cache Machine will not work properly with the file or database cache backends. - -If you want to set a prefix for all keys in Cache Machine, define -``CACHE_PREFIX`` in settings.py:: - - CACHE_PREFIX = 'weee:' - COUNT queries ^^^^^^^^^^^^^ diff --git a/fabfile.py b/fabfile.py index a6ed3bd..7701c7f 100644 --- a/fabfile.py +++ b/fabfile.py @@ -6,6 +6,7 @@ """ import functools import os +import sys from fabric.api import local, cd, env from fabric.contrib.project import rsync_project @@ -39,8 +40,14 @@ def doc(kind='html'): print 'WARNING: Skipping redis tests.' def test(): + if sys.version_info.major == 2 and sys.version_info.minor < 7: + import django + if django.VERSION[1] >= 7: + print("Skipping becuase Django >= 1.7 doesn't work with Python < 2.7") + return + for settings in SETTINGS: - print settings + print(settings) os.environ['DJANGO_SETTINGS_MODULE'] = 'cache_machine.%s' % settings local('django-admin.py test') From 021f380f9ee3e80c8a58b50407dc40cfe2e99d1b Mon Sep 17 00:00:00 2001 From: Reza Mohammadi Date: Thu, 17 Jul 2014 19:12:54 +0430 Subject: [PATCH 071/214] fixup: `version_info` of python 2.6 is a tuple --- fabfile.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/fabfile.py b/fabfile.py index 7701c7f..aa78442 100644 --- a/fabfile.py +++ b/fabfile.py @@ -40,7 +40,7 @@ def doc(kind='html'): print 'WARNING: Skipping redis tests.' 
def test(): - if sys.version_info.major == 2 and sys.version_info.minor < 7: + if sys.version_info[0] == 2 and sys.version_info[1] < 7: import django if django.VERSION[1] >= 7: print("Skipping becuase Django >= 1.7 doesn't work with Python < 2.7") From 844ddc6cc3eb44b77d25276d7d0eec3d769f86c0 Mon Sep 17 00:00:00 2001 From: Brad Jasper Date: Fri, 5 Sep 2014 14:15:59 -0400 Subject: [PATCH 072/214] Fixed misspelling 'imoprt' to 'import' --- docs/index.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/index.rst b/docs/index.rst index d54dc91..563c42d 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -102,7 +102,7 @@ Here's what a minimal cached model looks like:: from django.db import models - from caching.base imoprt CachingManager, CachingMixin + from caching.base import CachingManager, CachingMixin class Zomg(CachingMixin, models.Model): val = models.IntegerField() From e609bc2011546ab84fa19d21a6cb2933acaa6bc5 Mon Sep 17 00:00:00 2001 From: Eric Brelsford Date: Wed, 1 Oct 2014 23:41:46 -0400 Subject: [PATCH 073/214] Use get_queryset() instead of get_query_set() The latter is deprecated in Django 1.7. --- caching/base.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/caching/base.py b/caching/base.py index 53ee3d0..b2032fc 100644 --- a/caching/base.py +++ b/caching/base.py @@ -32,7 +32,7 @@ class CachingManager(models.Manager): # Tell Django to use this manager when resolving foreign keys. 
use_for_related_fields = True - def get_query_set(self): + def get_queryset(self): return CachingQuerySet(self.model, using=self._db) def contribute_to_class(self, cls, name): @@ -56,7 +56,7 @@ def raw(self, raw_query, params=None, *args, **kwargs): using=self._db, *args, **kwargs) def cache(self, timeout=DEFAULT_TIMEOUT): - return self.get_query_set().cache(timeout) + return self.get_queryset().cache(timeout) def no_cache(self): return self.cache(NO_CACHE) From 44789512e21a631cca039a3dfe77a37281740513 Mon Sep 17 00:00:00 2001 From: Rob Hudson Date: Tue, 7 Oct 2014 16:44:40 -0700 Subject: [PATCH 074/214] Added missing parse_sql import --- caching/invalidation.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/caching/invalidation.py b/caching/invalidation.py index 8b185ac..93251e8 100644 --- a/caching/invalidation.py +++ b/caching/invalidation.py @@ -5,9 +5,11 @@ import socket from django.conf import settings -from django.core.cache import cache as default_cache, get_cache +from django.core.cache import cache as default_cache +from django.core.cache import get_cache from django.core.cache.backends.base import InvalidCacheBackendError from django.utils import encoding, translation +from django.utils.six.moves.urllib.parse import parse_qsl try: import redis as redislib @@ -188,7 +190,8 @@ def parse_backend_uri(backend_uri): """ backend_uri_sliced = backend_uri.split('://') if len(backend_uri_sliced) > 2: - raise InvalidCacheBackendError("Backend URI can't have more than one scheme://") + raise InvalidCacheBackendError( + "Backend URI can't have more than one scheme://") elif len(backend_uri_sliced) == 2: rest = backend_uri_sliced[1] else: From 0207c42531e9fdcda63ceedca71dec93b6844f44 Mon Sep 17 00:00:00 2001 From: Rob Hudson Date: Thu, 9 Oct 2014 10:27:01 -0700 Subject: [PATCH 075/214] Update Travis to use the released version of Django 1.7 --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml 
b/.travis.yml index 247bab1..035903a 100644 --- a/.travis.yml +++ b/.travis.yml @@ -14,4 +14,4 @@ env: - DJANGO_SPEC="Django>=1.4,<1.5" - DJANGO_SPEC="Django>=1.5,<1.6" - DJANGO_SPEC="Django>=1.6,<1.7" - - DJANGO_SPEC="https://www.djangoproject.com/download/1.7c1/tarball/" + - DJANGO_SPEC="Django>=1.7,<1.8" From 3d7df962d24617ce1965659464be7bf132d63ca1 Mon Sep 17 00:00:00 2001 From: Eric Brelsford Date: Sat, 11 Oct 2014 10:21:27 -0400 Subject: [PATCH 076/214] Alias get_query_set to get_queryset As per Django 1.6 release notes: https://docs.djangoproject.com/en/dev/releases/1.6/#get-query-set-and-similar-methods-renamed-to-get-queryset --- caching/base.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/caching/base.py b/caching/base.py index b2032fc..c7fd089 100644 --- a/caching/base.py +++ b/caching/base.py @@ -1,6 +1,7 @@ import functools import logging +import django from django.conf import settings from django.db import models from django.db.models import signals @@ -35,6 +36,9 @@ class CachingManager(models.Manager): def get_queryset(self): return CachingQuerySet(self.model, using=self._db) + if django.VERSION < (1, 6): + get_query_set = get_queryset + def contribute_to_class(self, cls, name): signals.post_save.connect(self.post_save, sender=cls) signals.post_delete.connect(self.post_delete, sender=cls) From f89f656947201d4132a06850772185d2b977366c Mon Sep 17 00:00:00 2001 From: Eric Brelsford Date: Sat, 11 Oct 2014 10:21:57 -0400 Subject: [PATCH 077/214] get_query_set -> get_queryset in docs for consistency --- docs/index.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/index.rst b/docs/index.rst index 563c42d..7481d02 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -95,7 +95,7 @@ that class and inherit from the :class:`~caching.base.CachingMixin`. If you want related lookups (foreign keys) to hit the cache, ``CachingManager`` must be the default manager. 
If you have multiple managers that should be cached, return a :class:`~caching.base.CachingQuerySet` from the other manager's -``get_query_set`` method instead of subclassing ``CachingManager``, since that +``get_queryset`` method instead of subclassing ``CachingManager``, since that would hook up the post_save and post_delete signals multiple times. Here's what a minimal cached model looks like:: From 6b2b37ebbcac85a9f86e0e3b64c1c54243f50006 Mon Sep 17 00:00:00 2001 From: Jeff Balogh Date: Mon, 23 Mar 2015 08:11:05 -0700 Subject: [PATCH 078/214] This project needs a maintainer. --- README.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/README.rst b/README.rst index 0440bd9..df67634 100644 --- a/README.rst +++ b/README.rst @@ -2,6 +2,8 @@ Cache Machine ============= +`This project needs a maintainer. `_ + Cache Machine provides automatic caching and invalidation for Django models through the ORM. From 144b79ad15edb6d4b002b34511b24bf12c811e1c Mon Sep 17 00:00:00 2001 From: Jason Thomas Date: Fri, 24 Apr 2015 11:24:04 -0400 Subject: [PATCH 079/214] Fix parse_backend_uri. 
fixes #92 --- caching/invalidation.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/caching/invalidation.py b/caching/invalidation.py index 93251e8..7697981 100644 --- a/caching/invalidation.py +++ b/caching/invalidation.py @@ -197,11 +197,11 @@ def parse_backend_uri(backend_uri): else: rest = backend_uri_sliced[0] - host = rest[2:] + host = rest qpos = rest.find('?') if qpos != -1: params = dict(parse_qsl(rest[qpos+1:])) - host = rest[2:qpos] + host = rest[:qpos] else: params = {} if host.endswith('/'): From 3415ae789f3e943074401a6a81dc344135b8e684 Mon Sep 17 00:00:00 2001 From: Eric Brelsford Date: Tue, 26 May 2015 22:46:29 -0400 Subject: [PATCH 080/214] Switch from get_cache to caches get_cache is deprecated and will be removed in Django 1.9 --- caching/invalidation.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/caching/invalidation.py b/caching/invalidation.py index 93251e8..4f056dc 100644 --- a/caching/invalidation.py +++ b/caching/invalidation.py @@ -6,7 +6,7 @@ from django.conf import settings from django.core.cache import cache as default_cache -from django.core.cache import get_cache +from django.core.cache import caches from django.core.cache.backends.base import InvalidCacheBackendError from django.utils import encoding, translation from django.utils.six.moves.urllib.parse import parse_qsl @@ -18,7 +18,7 @@ # Look for an own cache first before falling back to the default cache try: - cache = get_cache('cache_machine') + cache = caches['cache_machine'] except (InvalidCacheBackendError, ValueError): cache = default_cache From af1e833a75273a756eacfc88f205cc3b841954d3 Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Tue, 30 Jun 2015 10:24:55 -0400 Subject: [PATCH 081/214] add MIDDLEWARE_CLASSES to test settings to avoid warnings when running with Django 1.7 --- examples/cache_machine/settings.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/examples/cache_machine/settings.py 
b/examples/cache_machine/settings.py index b6a43df..0048a04 100644 --- a/examples/cache_machine/settings.py +++ b/examples/cache_machine/settings.py @@ -24,3 +24,13 @@ ) SECRET_KEY = 'ok' + +MIDDLEWARE_CLASSES = ( + 'django.contrib.sessions.middleware.SessionMiddleware', + 'django.middleware.common.CommonMiddleware', + 'django.middleware.csrf.CsrfViewMiddleware', + 'django.contrib.auth.middleware.AuthenticationMiddleware', + 'django.contrib.auth.middleware.SessionAuthenticationMiddleware', + 'django.contrib.messages.middleware.MessageMiddleware', + 'django.middleware.clickjacking.XFrameOptionsMiddleware', +) From b109dfbd8e8f391bd2936a6ce1ce1ce9aca63117 Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Tue, 30 Jun 2015 19:14:35 -0400 Subject: [PATCH 082/214] get tests running with Django 1.8; remove references to outdated CacheClass class in memcached and locmem backends --- .travis.yml | 1 + caching/backends/locmem.py | 7 +------ caching/backends/memcached.py | 17 +++++------------ docs/index.rst | 3 +++ docs/releases.rst | 21 +++++++++++++++++++++ examples/cache_machine/locmem_settings.py | 2 +- 6 files changed, 32 insertions(+), 19 deletions(-) create mode 100644 docs/releases.rst diff --git a/.travis.yml b/.travis.yml index 035903a..002b2a9 100644 --- a/.travis.yml +++ b/.travis.yml @@ -15,3 +15,4 @@ env: - DJANGO_SPEC="Django>=1.5,<1.6" - DJANGO_SPEC="Django>=1.6,<1.7" - DJANGO_SPEC="Django>=1.7,<1.8" + - DJANGO_SPEC="Django>=1.8,<1.9" diff --git a/caching/backends/locmem.py b/caching/backends/locmem.py index db0a0ad..eb991bd 100644 --- a/caching/backends/locmem.py +++ b/caching/backends/locmem.py @@ -37,10 +37,5 @@ def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None): return super(InfinityMixin, self).set(key, value, timeout, version) -class CacheClass(InfinityMixin, locmem.CacheClass): +class LocMemCache(InfinityMixin, locmem.LocMemCache): pass - - -if django.VERSION[:2] >= (1, 3): - class LocMemCache(InfinityMixin, locmem.LocMemCache): - pass 
diff --git a/caching/backends/memcached.py b/caching/backends/memcached.py index 55a59b2..e872721 100644 --- a/caching/backends/memcached.py +++ b/caching/backends/memcached.py @@ -14,15 +14,8 @@ def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None): return super(InfinityMixin, self).set(key, value, timeout, version) -if django.VERSION[:2] >= (1, 3): - class MemcachedCache(InfinityMixin, memcached.MemcachedCache): - pass - - class PyLibMCCache(InfinityMixin, memcached.PyLibMCCache): - pass - - class CacheClass(MemcachedCache): - pass -else: - class CacheClass(InfinityMixin, memcached.CacheClass): - pass +class MemcachedCache(InfinityMixin, memcached.MemcachedCache): + pass + +class PyLibMCCache(InfinityMixin, memcached.PyLibMCCache): + pass diff --git a/docs/index.rst b/docs/index.rst index 7481d02..ad7383b 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -8,6 +8,9 @@ Cache Machine provides automatic caching and invalidation for Django models through the ORM. The code is hosted on `github `_. +For an overview of new features and backwards-incompatible changes which may +affect you, please see the :ref:`release-notes`. + Settings -------- diff --git a/docs/releases.rst b/docs/releases.rst new file mode 100644 index 0000000..fcea53d --- /dev/null +++ b/docs/releases.rst @@ -0,0 +1,21 @@ +.. _release-notes: + +Release Notes +================== + +v0.8.1 (release date TBD) +-------------------------------------- + +This release is primarily aimed at adding support for more recent versions of +Django and catching up on recent contributions. + +- Allow test suite to run under Django 1.7 and Django 1.8 + +Backwards Incompatible Changes +________________________________ + +- Dropped support for the old style ``caching.backends.memcached.CacheClass`` and + ``caching.backends.locmem.CacheClass`` classes. Support for this naming + has been deprecated since Django 1.3. 
You will need to switch your project + to use ``MemcachedCache``, ``PyLibMCCache``, or ``LocMemCache`` in place of + ``CacheClass``. diff --git a/examples/cache_machine/locmem_settings.py b/examples/cache_machine/locmem_settings.py index 5b3cbaf..edc370a 100644 --- a/examples/cache_machine/locmem_settings.py +++ b/examples/cache_machine/locmem_settings.py @@ -2,6 +2,6 @@ CACHES = { 'default': { - 'BACKEND': 'caching.backends.locmem.CacheClass', + 'BACKEND': 'caching.backends.locmem.LocMemCache', }, } From 0a3b16aac49e4b7c63bedeebc39ff2684d71dba2 Mon Sep 17 00:00:00 2001 From: Eric Brelsford Date: Tue, 30 Jun 2015 20:27:05 -0400 Subject: [PATCH 083/214] Add backwards compatibility check Use get_cache for versions of Django before 1.8 --- caching/invalidation.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/caching/invalidation.py b/caching/invalidation.py index 4f056dc..fe1349d 100644 --- a/caching/invalidation.py +++ b/caching/invalidation.py @@ -4,12 +4,13 @@ import logging import socket +import django from django.conf import settings from django.core.cache import cache as default_cache -from django.core.cache import caches from django.core.cache.backends.base import InvalidCacheBackendError from django.utils import encoding, translation from django.utils.six.moves.urllib.parse import parse_qsl +print 'hi' try: import redis as redislib @@ -18,7 +19,12 @@ # Look for an own cache first before falling back to the default cache try: - cache = caches['cache_machine'] + if django.VERSION[:2] >= (1, 8): + from django.core.cache import caches + cache = caches['cache_machine'] + else: + from django.core.cache import get_cache + cache = get_cache('cache_machine') except (InvalidCacheBackendError, ValueError): cache = default_cache From 5a63eeb9a4d831142d0b47e2af04fd41cb1cdc09 Mon Sep 17 00:00:00 2001 From: Eric Brelsford Date: Tue, 30 Jun 2015 20:31:40 -0400 Subject: [PATCH 084/214] Use caches rather than get_cache for Django >= 1.7 --- 
caching/invalidation.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/caching/invalidation.py b/caching/invalidation.py index fe1349d..8a1d5f2 100644 --- a/caching/invalidation.py +++ b/caching/invalidation.py @@ -10,7 +10,6 @@ from django.core.cache.backends.base import InvalidCacheBackendError from django.utils import encoding, translation from django.utils.six.moves.urllib.parse import parse_qsl -print 'hi' try: import redis as redislib @@ -19,7 +18,7 @@ # Look for an own cache first before falling back to the default cache try: - if django.VERSION[:2] >= (1, 8): + if django.VERSION[:2] >= (1, 7): from django.core.cache import caches cache = caches['cache_machine'] else: From 31737490ddddc8fcd0353030909e9b8d12fead93 Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Tue, 30 Jun 2015 21:35:32 -0400 Subject: [PATCH 085/214] don't try to run tests on Python 2.6 with Django 1.7 or later --- .travis.yml | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/.travis.yml b/.travis.yml index 002b2a9..1d9011f 100644 --- a/.travis.yml +++ b/.travis.yml @@ -16,3 +16,10 @@ env: - DJANGO_SPEC="Django>=1.6,<1.7" - DJANGO_SPEC="Django>=1.7,<1.8" - DJANGO_SPEC="Django>=1.8,<1.9" + +matrix: + exclude: + - python: "2.6" + env: DJANGO_SPEC="Django>=1.7,<1.8" + - python: "2.6" + env: DJANGO_SPEC="Django>=1.8,<1.9" From 183c3ec7c5103c092e4fe1d2884bd95b20802eb4 Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Tue, 30 Jun 2015 21:43:52 -0400 Subject: [PATCH 086/214] correct repository and Travis CI urls --- README.rst | 12 ++++++------ docs/index.rst | 2 +- setup.py | 2 +- 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/README.rst b/README.rst index df67634..2fb169e 100644 --- a/README.rst +++ b/README.rst @@ -9,8 +9,8 @@ through the ORM. For full docs, see https://cache-machine.readthedocs.org/en/latest/. -.. image:: https://travis-ci.org/jbalogh/django-cache-machine.png - :target: https://travis-ci.org/jbalogh/django-cache-machine +.. 
image:: https://travis-ci.org/django-cache-machine/django-cache-machine.png + :target: https://travis-ci.org/django-cache-machine/django-cache-machine Requirements @@ -27,18 +27,18 @@ Get it from `pypi `_:: pip install django-cache-machine -or `github `_:: +or `github `_:: - pip install -e git://github.com/jbalogh/django-cache-machine.git#egg=django-cache-machine + pip install -e git://github.com/django-cache-machine/django-cache-machine.git#egg=django-cache-machine Running Tests ------------- -Get it from `github `_:: +Get it from `github `_:: - git clone git://github.com/jbalogh/django-cache-machine.git + git clone git://github.com/django-cache-machine/django-cache-machine.git cd django-cache-machine pip install -r requirements.txt fab test diff --git a/docs/index.rst b/docs/index.rst index 7481d02..3270b9a 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -6,7 +6,7 @@ Cache Machine Cache Machine provides automatic caching and invalidation for Django models through the ORM. The code is hosted on -`github `_. +`github `_. 
Settings -------- diff --git a/setup.py b/setup.py index dae2dd0..5287164 100644 --- a/setup.py +++ b/setup.py @@ -11,7 +11,7 @@ long_description=open('README.rst').read(), author='Jeff Balogh', author_email='jbalogh@mozilla.com', - url='http://github.com/jbalogh/django-cache-machine', + url='http://github.com/django-cache-machine/django-cache-machine', license='BSD', packages=['caching', 'caching.backends'], include_package_data=True, From a52f14b6862b8a2d71d4ec0f673404a2714fc946 Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Tue, 30 Jun 2015 22:22:14 -0400 Subject: [PATCH 087/214] minor code formatting cleanup; run flake8 as part of Travis build --- .gitignore | 1 + .travis.yml | 1 + caching/backends/memcached.py | 2 +- caching/base.py | 4 ++-- caching/compat.py | 10 +++++----- examples/cache_machine/custom_backend.py | 2 +- examples/cache_machine/locmem_settings.py | 2 +- examples/cache_machine/memcache_byid.py | 2 +- examples/cache_machine/redis_byid.py | 2 +- examples/cache_machine/redis_settings.py | 2 +- fabfile.py | 1 + setup.cfg | 2 ++ tests/test_cache.py | 7 +++++-- 13 files changed, 23 insertions(+), 15 deletions(-) create mode 100644 setup.cfg diff --git a/.gitignore b/.gitignore index 7782d6a..56b5bab 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,4 @@ docs/_build *.py[co] *.egg-info +*~ diff --git a/.travis.yml b/.travis.yml index 1d9011f..dd33014 100644 --- a/.travis.yml +++ b/.travis.yml @@ -10,6 +10,7 @@ install: - pip install -r requirements.txt "$DJANGO_SPEC" script: - fab test + - flake8 . 
env: - DJANGO_SPEC="Django>=1.4,<1.5" - DJANGO_SPEC="Django>=1.5,<1.6" diff --git a/caching/backends/memcached.py b/caching/backends/memcached.py index e872721..f81ff0e 100644 --- a/caching/backends/memcached.py +++ b/caching/backends/memcached.py @@ -1,4 +1,3 @@ -import django from django.core.cache.backends import memcached from caching.compat import DEFAULT_TIMEOUT @@ -17,5 +16,6 @@ def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None): class MemcachedCache(InfinityMixin, memcached.MemcachedCache): pass + class PyLibMCCache(InfinityMixin, memcached.PyLibMCCache): pass diff --git a/caching/base.py b/caching/base.py index c7fd089..1449269 100644 --- a/caching/base.py +++ b/caching/base.py @@ -8,7 +8,7 @@ from django.db.models.sql import query, EmptyResultSet from django.utils import encoding -from .compat import DEFAULT_TIMEOUT, FOREVER +from .compat import DEFAULT_TIMEOUT from .invalidation import invalidator, flush_key, make_key, byid, cache @@ -251,7 +251,7 @@ def _cache_key(cls, pk, db): def _cache_keys(self): """Return the cache key for self plus all related foreign keys.""" fks = dict((f, getattr(self, f.attname)) for f in self._meta.fields - if isinstance(f, models.ForeignKey)) + if isinstance(f, models.ForeignKey)) keys = [fk.rel.to._cache_key(val, self._state.db) for fk, val in fks.items() if val is not None and hasattr(fk.rel.to, '_cache_key')] diff --git a/caching/compat.py b/caching/compat.py index 3f5c7de..3f0f78d 100644 --- a/caching/compat.py +++ b/caching/compat.py @@ -4,9 +4,9 @@ if django.VERSION[:2] >= (1, 6): - from django.core.cache.backends.base import DEFAULT_TIMEOUT as DJANGO_DEFAULT_TIMEOUT - DEFAULT_TIMEOUT = DJANGO_DEFAULT_TIMEOUT - FOREVER = None + from django.core.cache.backends.base import DEFAULT_TIMEOUT as DJANGO_DEFAULT_TIMEOUT + DEFAULT_TIMEOUT = DJANGO_DEFAULT_TIMEOUT + FOREVER = None else: - DEFAULT_TIMEOUT = None - FOREVER = 0 + DEFAULT_TIMEOUT = None + FOREVER = 0 diff --git 
a/examples/cache_machine/custom_backend.py b/examples/cache_machine/custom_backend.py index 5148b82..0b7b45d 100644 --- a/examples/cache_machine/custom_backend.py +++ b/examples/cache_machine/custom_backend.py @@ -1,4 +1,4 @@ -from settings import * +from settings import * # flake8: noqa CACHES = { 'default': { diff --git a/examples/cache_machine/locmem_settings.py b/examples/cache_machine/locmem_settings.py index edc370a..d96ed81 100644 --- a/examples/cache_machine/locmem_settings.py +++ b/examples/cache_machine/locmem_settings.py @@ -1,4 +1,4 @@ -from settings import * +from settings import * # flake8: noqa CACHES = { 'default': { diff --git a/examples/cache_machine/memcache_byid.py b/examples/cache_machine/memcache_byid.py index 85d711d..f100ba4 100644 --- a/examples/cache_machine/memcache_byid.py +++ b/examples/cache_machine/memcache_byid.py @@ -1,3 +1,3 @@ -from settings import * +from settings import * # flake8: noqa FETCH_BY_ID = True diff --git a/examples/cache_machine/redis_byid.py b/examples/cache_machine/redis_byid.py index 0504351..747015e 100644 --- a/examples/cache_machine/redis_byid.py +++ b/examples/cache_machine/redis_byid.py @@ -1,3 +1,3 @@ -from redis_settings import * +from redis_settings import * # flake8: noqa FETCH_BY_ID = True diff --git a/examples/cache_machine/redis_settings.py b/examples/cache_machine/redis_settings.py index a11fa00..5c228c9 100644 --- a/examples/cache_machine/redis_settings.py +++ b/examples/cache_machine/redis_settings.py @@ -1,4 +1,4 @@ -from settings import * +from settings import * # flake8: noqa CACHE_MACHINE_USE_REDIS = True REDIS_BACKEND = 'redis://' diff --git a/fabfile.py b/fabfile.py index aa78442..d311a05 100644 --- a/fabfile.py +++ b/fabfile.py @@ -39,6 +39,7 @@ def doc(kind='html'): except Exception: print 'WARNING: Skipping redis tests.' 
+ def test(): if sys.version_info[0] == 2 and sys.version_info[1] < 7: import django diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 0000000..0e2e870 --- /dev/null +++ b/setup.cfg @@ -0,0 +1,2 @@ +[flake8] +max-line-length=100 diff --git a/tests/test_cache.py b/tests/test_cache.py index 27601cf..7e0d7f3 100644 --- a/tests/test_cache.py +++ b/tests/test_cache.py @@ -179,7 +179,7 @@ def test_count_cache(self, cache_mock): cache_mock.get.return_value = None q = Addon.objects.all() - count = q.count() + q.count() args, kwargs = cache_mock.set.call_args key, value, timeout = args @@ -211,6 +211,7 @@ def test_queryset_flush_list(self): def test_jinja_cache_tag_queryset(self): env = jinja2.Environment(extensions=['caching.ext.cache']) + def check(q, expected): t = env.from_string( "{% cache q %}{% for x in q %}{{ x.id }}:{{ x.val }};" @@ -231,7 +232,7 @@ def check(q, expected): a.save() q = Addon.objects.all() - flush = cache.get(q.flush_key()) + cache.get(q.flush_key()) assert cache.get(q.flush_key()) is None check(Addon.objects.all(), '1:17;2:42;') @@ -293,6 +294,7 @@ def check(obj, expected): def test_cached_with(self): counter = mock.Mock() + def expensive(): counter() return counter.call_count @@ -332,6 +334,7 @@ def expensive(): def test_cached_with_bad_object(self): """cached_with shouldn't fail if the object is missing a cache key.""" counter = mock.Mock() + def f(): counter() return counter.call_count From 0e7b9a68d6e7218b705335e826450b5acf94a027 Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Tue, 30 Jun 2015 22:31:49 -0400 Subject: [PATCH 088/214] add flake8 to requirements.txt --- requirements.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/requirements.txt b/requirements.txt index ed588ee..4353bd2 100644 --- a/requirements.txt +++ b/requirements.txt @@ -6,3 +6,4 @@ python-memcached fabric jinja2 redis +flake8 From 9bbb1d98e2d347e6c2a1db70c6aad59ddc31e1d5 Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Tue, 30 Jun 2015 23:25:40 
-0400 Subject: [PATCH 089/214] run coverage.py as part of Travis build and report back to Coveralls --- .coveragerc | 2 ++ .gitignore | 1 + .travis.yml | 4 +++- fabfile.py | 58 ------------------------------------------------ requirements.txt | 1 + run_tests.py | 45 +++++++++++++++++++++++++++++++++++++ 6 files changed, 52 insertions(+), 59 deletions(-) create mode 100644 .coveragerc delete mode 100644 fabfile.py create mode 100644 run_tests.py diff --git a/.coveragerc b/.coveragerc new file mode 100644 index 0000000..28edad5 --- /dev/null +++ b/.coveragerc @@ -0,0 +1,2 @@ +[run] +source = caching diff --git a/.gitignore b/.gitignore index 56b5bab..ea0208f 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,4 @@ +.coverage docs/_build *.py[co] *.egg-info diff --git a/.travis.yml b/.travis.yml index dd33014..324ec3f 100644 --- a/.travis.yml +++ b/.travis.yml @@ -8,16 +8,18 @@ python: install: - pip install -e . - pip install -r requirements.txt "$DJANGO_SPEC" + - pip install coveralls script: - fab test - flake8 . +after_success: + - coveralls env: - DJANGO_SPEC="Django>=1.4,<1.5" - DJANGO_SPEC="Django>=1.5,<1.6" - DJANGO_SPEC="Django>=1.6,<1.7" - DJANGO_SPEC="Django>=1.7,<1.8" - DJANGO_SPEC="Django>=1.8,<1.9" - matrix: exclude: - python: "2.6" diff --git a/fabfile.py b/fabfile.py deleted file mode 100644 index d311a05..0000000 --- a/fabfile.py +++ /dev/null @@ -1,58 +0,0 @@ -""" -Creating standalone Django apps is a PITA because you're not in a project, so -you don't have a settings.py file. I can never remember to define -DJANGO_SETTINGS_MODULE, so I run these commands which get the right env -automatically. 
-""" -import functools -import os -import sys - -from fabric.api import local, cd, env -from fabric.contrib.project import rsync_project - -NAME = os.path.basename(os.path.dirname(__file__)) -ROOT = os.path.abspath(os.path.dirname(__file__)) - -os.environ['PYTHONPATH'] = os.pathsep.join([ROOT, - os.path.join(ROOT, 'examples')]) - -env.hosts = ['jbalogh.me'] - -local = functools.partial(local, capture=False) - - -def doc(kind='html'): - with cd('docs'): - local('make clean %s' % kind) - - -SETTINGS = ('locmem_settings', - 'settings', - 'memcache_byid', - 'custom_backend') - -try: - import redis - redis.Redis(host='localhost', port=6379).info() - SETTINGS += ('redis_settings', 'redis_byid') -except Exception: - print 'WARNING: Skipping redis tests.' - - -def test(): - if sys.version_info[0] == 2 and sys.version_info[1] < 7: - import django - if django.VERSION[1] >= 7: - print("Skipping becuase Django >= 1.7 doesn't work with Python < 2.7") - return - - for settings in SETTINGS: - print(settings) - os.environ['DJANGO_SETTINGS_MODULE'] = 'cache_machine.%s' % settings - local('django-admin.py test') - - -def updoc(): - doc('dirhtml') - rsync_project('p/%s' % NAME, 'docs/_build/dirhtml/', delete=True) diff --git a/requirements.txt b/requirements.txt index 4353bd2..dec4880 100644 --- a/requirements.txt +++ b/requirements.txt @@ -7,3 +7,4 @@ fabric jinja2 redis flake8 +coverage diff --git a/run_tests.py b/run_tests.py new file mode 100644 index 0000000..548372e --- /dev/null +++ b/run_tests.py @@ -0,0 +1,45 @@ +""" +Creating standalone Django apps is a PITA because you're not in a project, so +you don't have a settings.py file. I can never remember to define +DJANGO_SETTINGS_MODULE, so I run these commands which get the right env +automatically. 
+""" +import os +import sys + +from subprocess import call, check_output + +NAME = os.path.basename(os.path.dirname(__file__)) +ROOT = os.path.abspath(os.path.dirname(__file__)) + +os.environ['PYTHONPATH'] = os.pathsep.join([ROOT, + os.path.join(ROOT, 'examples')]) + +SETTINGS = ( + 'locmem_settings', + 'settings', + 'memcache_byid', + 'custom_backend', + 'redis_settings', + 'redis_byid', +) + + +def main(): + results = [] + django_admin = check_output(['which', 'django-admin.py']).strip() + for i, settings in enumerate(SETTINGS): + print('Running tests for: %s' % settings) + os.environ['DJANGO_SETTINGS_MODULE'] = 'cache_machine.%s' % settings + # append to the existing coverage data for all but the first run + if i > 0: + coverage_cmd = ['coverage', 'run', '--append', django_admin, 'test'] + else: + coverage_cmd = ['coverage', 'run', django_admin, 'test'] + results.append(call(coverage_cmd)) + results.append(call(['coverage', 'report', '-m', '--fail-under', '70'])) + sys.exit(any(results) and 1 or 0) + + +if __name__ == "__main__": + main() From f759f22fa03594590aa2be3af41fc82adb10bb50 Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Tue, 30 Jun 2015 23:28:53 -0400 Subject: [PATCH 090/214] remove Fabric from requirements.txt --- requirements.txt | 1 - 1 file changed, 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index dec4880..cd7dbbb 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,7 +3,6 @@ sphinx mock django-nose python-memcached -fabric jinja2 redis flake8 From 079d1d9d8854b0203d2617f616311f6cbb04083c Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Tue, 30 Jun 2015 23:36:56 -0400 Subject: [PATCH 091/214] update command to run tests in .travis.yml --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 324ec3f..ebc0c26 100644 --- a/.travis.yml +++ b/.travis.yml @@ -10,7 +10,7 @@ install: - pip install -r requirements.txt "$DJANGO_SPEC" - pip install coveralls script: - - 
fab test + - python run_tests.py - flake8 . after_success: - coveralls From 2737938659c2cf979c9b7b0b352d73e8d0bdea96 Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Tue, 30 Jun 2015 23:41:21 -0400 Subject: [PATCH 092/214] clarify variable naming in run_tests.py --- run_tests.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/run_tests.py b/run_tests.py index 548372e..5aaee83 100644 --- a/run_tests.py +++ b/run_tests.py @@ -33,10 +33,10 @@ def main(): os.environ['DJANGO_SETTINGS_MODULE'] = 'cache_machine.%s' % settings # append to the existing coverage data for all but the first run if i > 0: - coverage_cmd = ['coverage', 'run', '--append', django_admin, 'test'] + test_cmd = ['coverage', 'run', '--append', django_admin, 'test'] else: - coverage_cmd = ['coverage', 'run', django_admin, 'test'] - results.append(call(coverage_cmd)) + test_cmd = ['coverage', 'run', django_admin, 'test'] + results.append(call(test_cmd)) results.append(call(['coverage', 'report', '-m', '--fail-under', '70'])) sys.exit(any(results) and 1 or 0) From 0e3a191f79a540ade92dea6804d13fa21fa6e0c9 Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Tue, 30 Jun 2015 23:52:25 -0400 Subject: [PATCH 093/214] add substitute check_output method when running under Python 2.6 --- run_tests.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/run_tests.py b/run_tests.py index 5aaee83..372a5d2 100644 --- a/run_tests.py +++ b/run_tests.py @@ -7,7 +7,14 @@ import os import sys -from subprocess import call, check_output +from subprocess import call +try: + from subprocess import check_output +except ImportError: + # Python 2.6 doesn't have check_output. Note this will not raise a CalledProcessError + # like check_output does, but it should work for our purposes. 
+ import subprocess + check_output = lambda x: subprocess.Popen(x, stdout=subprocess.PIPE).communicate()[0] NAME = os.path.basename(os.path.dirname(__file__)) ROOT = os.path.abspath(os.path.dirname(__file__)) From b5ebd9ccb626353081532137e2327d1775640c60 Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Wed, 1 Jul 2015 00:27:43 -0400 Subject: [PATCH 094/214] add coveralls badge to README.rst --- README.rst | 3 +++ 1 file changed, 3 insertions(+) diff --git a/README.rst b/README.rst index 2fb169e..899698b 100644 --- a/README.rst +++ b/README.rst @@ -12,6 +12,9 @@ For full docs, see https://cache-machine.readthedocs.org/en/latest/. .. image:: https://travis-ci.org/django-cache-machine/django-cache-machine.png :target: https://travis-ci.org/django-cache-machine/django-cache-machine +.. image:: https://coveralls.io/repos/django-cache-machine/django-cache-machine/badge.svg + :target: https://coveralls.io/r/django-cache-machine/django-cache-machine + Requirements ------------ From 4c90a011c70a5226223e1b6118b473f39761d2c1 Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Wed, 1 Jul 2015 00:34:16 -0400 Subject: [PATCH 095/214] update README with new command to run tests --- README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.rst b/README.rst index 899698b..b61ae22 100644 --- a/README.rst +++ b/README.rst @@ -44,4 +44,4 @@ Get it from `github Date: Wed, 1 Jul 2015 00:56:24 -0400 Subject: [PATCH 096/214] Update coveralls badge URL --- README.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.rst b/README.rst index b61ae22..1992e05 100644 --- a/README.rst +++ b/README.rst @@ -12,8 +12,8 @@ For full docs, see https://cache-machine.readthedocs.org/en/latest/. .. image:: https://travis-ci.org/django-cache-machine/django-cache-machine.png :target: https://travis-ci.org/django-cache-machine/django-cache-machine -.. 
image:: https://coveralls.io/repos/django-cache-machine/django-cache-machine/badge.svg - :target: https://coveralls.io/r/django-cache-machine/django-cache-machine +.. image:: https://coveralls.io/repos/django-cache-machine/django-cache-machine/badge.svg?branch=master + :target: https://coveralls.io/r/django-cache-machine/django-cache-machine?branch=master Requirements From d1c3c90ee39136e122c083107b43cf5de320dddb Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Wed, 1 Jul 2015 01:21:00 -0400 Subject: [PATCH 097/214] test showing broken parse_backend_uri. refs #92 --- tests/test_cache.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/tests/test_cache.py b/tests/test_cache.py index 7e0d7f3..c7c491a 100644 --- a/tests/test_cache.py +++ b/tests/test_cache.py @@ -489,3 +489,11 @@ def test_multidb_fetch_by_id(self): from_slave = Addon.objects.using('slave').get(id=1) assert from_slave.from_cache is False assert from_slave._state.db == 'slave' + + def test_parse_backend_uri(self): + """ Test that parse_backend_uri works as intended. Regression for #92. 
""" + from caching.invalidation import parse_backend_uri + uri = 'redis://127.0.0.1:6379?socket_timeout=5' + host, params = parse_backend_uri(uri) + self.assertEqual(host, '127.0.0.1:6379') + self.assertEqual(params, {'socket_timeout': '5'}) From 58c66a78db448b5bc5af825b8d4cc304837820f8 Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Thu, 2 Jul 2015 21:57:21 -0400 Subject: [PATCH 098/214] first pass at Python 3 support --- caching/base.py | 28 ++++++++++----------- caching/compat.py | 13 +++++++++- caching/ext.py | 2 +- caching/invalidation.py | 30 ++++++++++++----------- examples/cache_machine/custom_backend.py | 2 +- examples/cache_machine/locmem_settings.py | 2 +- examples/cache_machine/memcache_byid.py | 2 +- examples/cache_machine/redis_byid.py | 2 +- examples/cache_machine/redis_settings.py | 2 +- examples/cache_machine/settings.py | 2 +- requirements.txt | 1 + run_tests.py | 1 + setup.py | 7 +++++- tests/test_cache.py | 17 +++++++------ 14 files changed, 66 insertions(+), 45 deletions(-) diff --git a/caching/base.py b/caching/base.py index 1449269..d37358a 100644 --- a/caching/base.py +++ b/caching/base.py @@ -8,7 +8,7 @@ from django.db.models.sql import query, EmptyResultSet from django.utils import encoding -from .compat import DEFAULT_TIMEOUT +from .compat import DEFAULT_TIMEOUT, u from .invalidation import invalidator, flush_key, make_key, byid, cache @@ -89,7 +89,7 @@ def query_key(self): master), throwing a Django ValueError in the process. Django prevents cross DB model saving among related objects. 
""" - query_db_string = u'qs:%s::db:%s' % (self.query_string, self.db) + query_db_string = u('qs:{0}::db:{1}').format(self.query_string, self.db) return make_key(query_db_string, with_locale=False) def __iter__(self): @@ -113,7 +113,7 @@ def __iter__(self): to_cache = [] try: while True: - obj = iterator.next() + obj = next(iterator) obj.from_cache = False to_cache.append(obj) yield obj @@ -169,14 +169,14 @@ def fetch_by_id(self): """ # Include columns from extra since they could be used in the query's # order_by. - vals = self.values_list('pk', *self.query.extra.keys()) + vals = self.values_list('pk', *list(self.query.extra.keys())) pks = [val[0] for val in vals] keys = dict((byid(self.model._cache_key(pk, self.db)), pk) for pk in pks) - cached = dict((k, v) for k, v in cache.get_many(keys).items() + cached = dict((k, v) for k, v in list(cache.get_many(keys).items()) if v is not None) # Pick up the objects we missed. - missed = [pk for key, pk in keys.items() if key not in cached] + missed = [pk for key, pk in list(keys.items()) if key not in cached] if missed: others = self.fetch_missed(missed) # Put the fetched objects back in cache. @@ -186,7 +186,7 @@ def fetch_by_id(self): new = {} # Use pks to return the objects in the correct order. - objects = dict((o.pk, o) for o in cached.values() + new.values()) + objects = dict((o.pk, o) for o in list(cached.values()) + list(new.values())) for pk in pks: yield objects[pk] @@ -246,14 +246,14 @@ def _cache_key(cls, pk, db): For the Addon class, with a pk of 2, we get "o:addons.addon:2". 
""" key_parts = ('o', cls._meta, pk, db) - return ':'.join(map(encoding.smart_unicode, key_parts)) + return ':'.join(map(encoding.smart_text, key_parts)) def _cache_keys(self): """Return the cache key for self plus all related foreign keys.""" fks = dict((f, getattr(self, f.attname)) for f in self._meta.fields if isinstance(f, models.ForeignKey)) - keys = [fk.rel.to._cache_key(val, self._state.db) for fk, val in fks.items() + keys = [fk.rel.to._cache_key(val, self._state.db) for fk, val in list(fks.items()) if val is not None and hasattr(fk.rel.to, '_cache_key')] return (self.cache_key,) + tuple(keys) @@ -270,7 +270,7 @@ def __iter__(self): if self.timeout == NO_CACHE: iterator = iterator() while True: - yield iterator.next() + yield next(iterator) else: sql = self.raw_query % tuple(self.params) for obj in CacheMachine(sql, iterator, timeout=self.timeout): @@ -302,7 +302,7 @@ def cached_with(obj, f, f_key, timeout=DEFAULT_TIMEOUT): obj_key = (obj.query_key() if hasattr(obj, 'query_key') else obj.cache_key) except (AttributeError, EmptyResultSet): - log.warning(u'%r cannot be cached.' % encoding.smart_str(obj)) + log.warning(u('%r cannot be cached.' 
% encoding.smart_str(obj))) return f() key = '%s:%s' % tuple(map(encoding.smart_str, (f_key, obj_key))) @@ -352,11 +352,11 @@ def __init__(self, obj, func): def __call__(self, *args, **kwargs): k = lambda o: o.cache_key if hasattr(o, 'cache_key') else o - arg_keys = map(k, args) - kwarg_keys = [(key, k(val)) for key, val in kwargs.items()] + arg_keys = list(map(k, args)) + kwarg_keys = [(key, k(val)) for key, val in list(kwargs.items())] key_parts = ('m', self.obj.cache_key, self.func.__name__, arg_keys, kwarg_keys) - key = ':'.join(map(encoding.smart_unicode, key_parts)) + key = ':'.join(map(encoding.smart_text, key_parts)) if key not in self.cache: f = functools.partial(self.func, self.obj, *args, **kwargs) self.cache[key] = cached_with(self.obj, f, key) diff --git a/caching/compat.py b/caching/compat.py index 3f0f78d..3becbb6 100644 --- a/caching/compat.py +++ b/caching/compat.py @@ -1,6 +1,7 @@ +import sys import django -__all__ = ['DEFAULT_TIMEOUT', 'FOREVER'] +__all__ = ['DEFAULT_TIMEOUT', 'FOREVER', 'u', 'basestring_'] if django.VERSION[:2] >= (1, 6): @@ -10,3 +11,13 @@ else: DEFAULT_TIMEOUT = None FOREVER = 0 + +if sys.version_info < (3,): + import codecs + def u(x): + return codecs.unicode_escape_decode(x)[0] + basestring_ = basestring +else: + def u(x): + return x + basestring_ = str diff --git a/caching/ext.py b/caching/ext.py index b8fe3d5..b8b33bc 100644 --- a/caching/ext.py +++ b/caching/ext.py @@ -33,7 +33,7 @@ def parse(self, parser): # we only listen to ``'cache'`` so this will be a name token with # `cache` as value. We get the line number so that we can give # that line number to the nodes we create by hand. - lineno = parser.stream.next().lineno + lineno = next(parser.stream).lineno # Use the filename + line number and first object for the cache key. 
name = '%s+%s' % (self.name, lineno) diff --git a/caching/invalidation.py b/caching/invalidation.py index e352ef7..5c3a511 100644 --- a/caching/invalidation.py +++ b/caching/invalidation.py @@ -11,6 +11,8 @@ from django.utils import encoding, translation from django.utils.six.moves.urllib.parse import parse_qsl +from .compat import basestring_ + try: import redis as redislib except ImportError: @@ -30,29 +32,29 @@ CACHE_PREFIX = getattr(settings, 'CACHE_PREFIX', '') FETCH_BY_ID = getattr(settings, 'FETCH_BY_ID', False) -FLUSH = CACHE_PREFIX + ':flush:' +FLUSH = encoding.smart_bytes(CACHE_PREFIX + ':flush:') log = logging.getLogger('caching.invalidation') def make_key(k, with_locale=True): """Generate the full key for ``k``, with a prefix.""" - key = encoding.smart_str('%s:%s' % (CACHE_PREFIX, k)) + key = encoding.smart_bytes(':'.join((CACHE_PREFIX, k))) if with_locale: - key += encoding.smart_str(translation.get_language()) + key += encoding.smart_bytes(translation.get_language()) # memcached keys must be < 250 bytes and w/o whitespace, but it's nice # to see the keys when using locmem. 
- return hashlib.md5(key).hexdigest() + return encoding.smart_bytes(hashlib.md5(key).hexdigest()) def flush_key(obj): """We put flush lists in the flush: namespace.""" - key = obj if isinstance(obj, basestring) else obj.cache_key + key = obj if isinstance(obj, basestring_) else obj.cache_key return FLUSH + make_key(key, with_locale=False) def byid(obj): - key = obj if isinstance(obj, basestring) else obj.cache_key + key = obj if isinstance(obj, basestring_) else obj.cache_key return make_key('byid:' + key) @@ -67,7 +69,7 @@ def decorator(f): def wrapper(*args, **kw): try: return f(*args, **kw) - except (socket.error, redislib.RedisError), e: + except (socket.error, redislib.RedisError) as e: log.error('redis error: %s' % e) # log.error('%r\n%r : %r' % (f.__name__, args[1:], kw)) if hasattr(return_type, '__call__'): @@ -137,8 +139,8 @@ def find_flush_lists(self, keys): def add_to_flush_list(self, mapping): """Update flush lists with the {flush_key: [query_key,...]} map.""" flush_lists = collections.defaultdict(set) - flush_lists.update(cache.get_many(mapping.keys())) - for key, list_ in mapping.items(): + flush_lists.update(cache.get_many(list(mapping.keys()))) + for key, list_ in list(mapping.items()): if flush_lists[key] is None: flush_lists[key] = set(list_) else: @@ -148,7 +150,7 @@ def add_to_flush_list(self, mapping): def get_flush_lists(self, keys): """Return a set of object keys from the lists in `keys`.""" return set(e for flush_list in - filter(None, cache.get_many(keys).values()) + [_f for _f in list(cache.get_many(keys).values()) if _f] for e in flush_list) def clear_flush_lists(self, keys): @@ -159,7 +161,7 @@ def clear_flush_lists(self, keys): class RedisInvalidator(Invalidator): def safe_key(self, key): - if ' ' in key or '\n' in key: + if b' ' in key or b'\n' in key: log.warning('BAD KEY: "%s"' % key) return '' return key @@ -168,18 +170,18 @@ def safe_key(self, key): def add_to_flush_list(self, mapping): """Update flush lists with the {flush_key: 
[query_key,...]} map.""" pipe = redis.pipeline(transaction=False) - for key, list_ in mapping.items(): + for key, list_ in list(mapping.items()): for query_key in list_: pipe.sadd(self.safe_key(key), query_key) pipe.execute() @safe_redis(set) def get_flush_lists(self, keys): - return redis.sunion(map(self.safe_key, keys)) + return redis.sunion(list(map(self.safe_key, keys))) @safe_redis(None) def clear_flush_lists(self, keys): - redis.delete(*map(self.safe_key, keys)) + redis.delete(*list(map(self.safe_key, keys))) class NullInvalidator(Invalidator): diff --git a/examples/cache_machine/custom_backend.py b/examples/cache_machine/custom_backend.py index 0b7b45d..53e2789 100644 --- a/examples/cache_machine/custom_backend.py +++ b/examples/cache_machine/custom_backend.py @@ -1,4 +1,4 @@ -from settings import * # flake8: noqa +from .settings import * # flake8: noqa CACHES = { 'default': { diff --git a/examples/cache_machine/locmem_settings.py b/examples/cache_machine/locmem_settings.py index d96ed81..bfe4c62 100644 --- a/examples/cache_machine/locmem_settings.py +++ b/examples/cache_machine/locmem_settings.py @@ -1,4 +1,4 @@ -from settings import * # flake8: noqa +from .settings import * # flake8: noqa CACHES = { 'default': { diff --git a/examples/cache_machine/memcache_byid.py b/examples/cache_machine/memcache_byid.py index f100ba4..4098de6 100644 --- a/examples/cache_machine/memcache_byid.py +++ b/examples/cache_machine/memcache_byid.py @@ -1,3 +1,3 @@ -from settings import * # flake8: noqa +from .settings import * # flake8: noqa FETCH_BY_ID = True diff --git a/examples/cache_machine/redis_byid.py b/examples/cache_machine/redis_byid.py index 747015e..61eb247 100644 --- a/examples/cache_machine/redis_byid.py +++ b/examples/cache_machine/redis_byid.py @@ -1,3 +1,3 @@ -from redis_settings import * # flake8: noqa +from .redis_settings import * # flake8: noqa FETCH_BY_ID = True diff --git a/examples/cache_machine/redis_settings.py b/examples/cache_machine/redis_settings.py 
index 5c228c9..1214942 100644 --- a/examples/cache_machine/redis_settings.py +++ b/examples/cache_machine/redis_settings.py @@ -1,4 +1,4 @@ -from settings import * # flake8: noqa +from .settings import * # flake8: noqa CACHE_MACHINE_USE_REDIS = True REDIS_BACKEND = 'redis://' diff --git a/examples/cache_machine/settings.py b/examples/cache_machine/settings.py index 0048a04..3035919 100644 --- a/examples/cache_machine/settings.py +++ b/examples/cache_machine/settings.py @@ -1,6 +1,6 @@ CACHES = { 'default': { - 'BACKEND': 'caching.backends.memcached.MemcachedCache', + 'BACKEND': 'caching.backends.memcached.PyLibMCCache', 'LOCATION': 'localhost:11211', }, } diff --git a/requirements.txt b/requirements.txt index cd7dbbb..4ab3e28 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,6 +3,7 @@ sphinx mock django-nose python-memcached +pylibmc jinja2 redis flake8 diff --git a/run_tests.py b/run_tests.py index 372a5d2..326962f 100644 --- a/run_tests.py +++ b/run_tests.py @@ -1,3 +1,4 @@ +#!/usr/bin/env python """ Creating standalone Django apps is a PITA because you're not in a project, so you don't have a settings.py file. 
I can never remember to define diff --git a/setup.py b/setup.py index 5287164..5a28120 100644 --- a/setup.py +++ b/setup.py @@ -1,7 +1,11 @@ from setuptools import setup +import sys import caching +extra = {} +if sys.version_info >= (3,): + extra['use_2to3'] = True setup( name='django-cache-machine', @@ -27,5 +31,6 @@ 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Software Development :: Libraries :: Python Modules', - ] + ], + **extra ) diff --git a/tests/test_cache.py b/tests/test_cache.py index c7c491a..acbdb97 100644 --- a/tests/test_cache.py +++ b/tests/test_cache.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- import django from django.conf import settings from django.test import TestCase @@ -9,10 +8,11 @@ from nose.tools import eq_ from caching import base, invalidation +from caching.compat import u cache = invalidation.cache -from testapp.models import Addon, User +from .testapp.models import Addon, User if django.get_version().startswith('1.3'): class settings_patch(object): @@ -21,7 +21,7 @@ def __init__(self, **kwargs): def __enter__(self): self._old_settings = dict((k, getattr(settings, k, None)) for k in self.options) - for k, v in self.options.items(): + for k, v in list(self.options.items()): setattr(settings, k, v) def __exit__(self, *args): @@ -342,12 +342,13 @@ def f(): eq_(base.cached_with([], f, 'key'), 1) def test_cached_with_unicode(self): - u = ':'.join(map(encoding.smart_str, [u'תיאור אוסף'])) + ustr = u('\\u05ea\\u05d9\\u05d0\\u05d5\\u05e8 \\u05d0\\u05d5\\u05e1\\u05e3') + ustr = ':'.join(map(encoding.smart_str, [ustr])) obj = mock.Mock() - obj.query_key.return_value = u'xxx' + obj.query_key.return_value = u('xxx') obj.flush_key.return_value = 'key' f = lambda: 1 - eq_(base.cached_with(obj, f, 'adf:%s' % u), 1) + eq_(base.cached_with(obj, f, 'adf:%s' % ustr), 1) def test_cached_method(self): a = Addon.objects.get(id=1) @@ -428,7 +429,7 @@ def test_cache_machine_timeout(self, cache): eq_(kwargs, {'timeout': 12}) 
def test_unicode_key(self): - list(User.objects.filter(name=u'ümlaüt')) + list(User.objects.filter(name=u('\\xfcmla\\xfct'))) def test_empty_in(self): # Raised an exception before fixing #2. @@ -459,7 +460,7 @@ def test_invalidate_new_object(self): eq_([a.val for a in u.addon_set.all()], [42, 17]) def test_make_key_unicode(self): - translation.activate(u'en-US') + translation.activate(u('en-US')) f = 'fragment\xe9\x9b\xbb\xe8\x85\xa6\xe7\x8e' # This would crash with a unicode error. base.make_key(f, with_locale=True) From af4ed382b499483157641f35125139137bfd8e3d Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Thu, 2 Jul 2015 22:10:32 -0400 Subject: [PATCH 099/214] correct flake8 error --- caching/compat.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/caching/compat.py b/caching/compat.py index 3becbb6..4261097 100644 --- a/caching/compat.py +++ b/caching/compat.py @@ -14,9 +14,10 @@ if sys.version_info < (3,): import codecs + def u(x): return codecs.unicode_escape_decode(x)[0] - basestring_ = basestring + basestring_ = basestring # flake8: noqa else: def u(x): return x From 20659b51b6f5a348e129beb0f787a9e917618888 Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Thu, 2 Jul 2015 22:25:03 -0400 Subject: [PATCH 100/214] add Python 3.3 and 3.4 to .travis.yml --- .travis.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.travis.yml b/.travis.yml index ebc0c26..d3e4199 100644 --- a/.travis.yml +++ b/.travis.yml @@ -5,6 +5,8 @@ services: python: - "2.6" - "2.7" + - "3.3" + - "3.4" install: - pip install -e . 
- pip install -r requirements.txt "$DJANGO_SPEC" @@ -26,3 +28,7 @@ matrix: env: DJANGO_SPEC="Django>=1.7,<1.8" - python: "2.6" env: DJANGO_SPEC="Django>=1.8,<1.9" + - python: "3.3" + env: DJANGO_SPEC="Django>=1.4,<1.5" + - python: "3.4" + env: DJANGO_SPEC="Django>=1.4,<1.5" From 9d87b19a1a5a39e4b18278fad4851dbe2e7459c3 Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Thu, 2 Jul 2015 22:26:04 -0400 Subject: [PATCH 101/214] clean up formatting in settings.py --- examples/cache_machine/settings.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/cache_machine/settings.py b/examples/cache_machine/settings.py index 3035919..b06f007 100644 --- a/examples/cache_machine/settings.py +++ b/examples/cache_machine/settings.py @@ -15,7 +15,7 @@ 'slave': { 'NAME': 'test_slave.db', 'ENGINE': 'django.db.backends.sqlite3', - } + }, } INSTALLED_APPS = ( From 98c07f3a03d46a8ef7c2b106494c26e3c0972452 Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Fri, 3 Jul 2015 01:03:26 -0400 Subject: [PATCH 102/214] Python 3 fixes --- caching/invalidation.py | 15 ++++++++------- examples/cache_machine/custom_backend.py | 2 +- tests/test_cache.py | 4 ++-- 3 files changed, 11 insertions(+), 10 deletions(-) diff --git a/caching/invalidation.py b/caching/invalidation.py index 5c3a511..8818341 100644 --- a/caching/invalidation.py +++ b/caching/invalidation.py @@ -32,19 +32,19 @@ CACHE_PREFIX = getattr(settings, 'CACHE_PREFIX', '') FETCH_BY_ID = getattr(settings, 'FETCH_BY_ID', False) -FLUSH = encoding.smart_bytes(CACHE_PREFIX + ':flush:') +FLUSH = CACHE_PREFIX + ':flush:' log = logging.getLogger('caching.invalidation') def make_key(k, with_locale=True): """Generate the full key for ``k``, with a prefix.""" - key = encoding.smart_bytes(':'.join((CACHE_PREFIX, k))) + key = '%s:%s' % (CACHE_PREFIX, encoding.smart_text(k)) if with_locale: - key += encoding.smart_bytes(translation.get_language()) + key += encoding.smart_text(translation.get_language()) # memcached keys must 
be < 250 bytes and w/o whitespace, but it's nice # to see the keys when using locmem. - return encoding.smart_bytes(hashlib.md5(key).hexdigest()) + return hashlib.md5(encoding.smart_bytes(key)).hexdigest() def flush_key(obj): @@ -161,7 +161,7 @@ def clear_flush_lists(self, keys): class RedisInvalidator(Invalidator): def safe_key(self, key): - if b' ' in key or b'\n' in key: + if ' ' in key or '\n' in key: log.warning('BAD KEY: "%s"' % key) return '' return key @@ -172,12 +172,13 @@ def add_to_flush_list(self, mapping): pipe = redis.pipeline(transaction=False) for key, list_ in list(mapping.items()): for query_key in list_: - pipe.sadd(self.safe_key(key), query_key) + pipe.sadd(self.safe_key(key), query_key.encode('utf-8')) pipe.execute() @safe_redis(set) def get_flush_lists(self, keys): - return redis.sunion(list(map(self.safe_key, keys))) + flush_list = redis.sunion(list(map(self.safe_key, keys))) + return [k.decode('utf-8') for k in flush_list] @safe_redis(None) def clear_flush_lists(self, keys): diff --git a/examples/cache_machine/custom_backend.py b/examples/cache_machine/custom_backend.py index 53e2789..0c0b920 100644 --- a/examples/cache_machine/custom_backend.py +++ b/examples/cache_machine/custom_backend.py @@ -5,7 +5,7 @@ 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', }, 'cache_machine': { - 'BACKEND': 'caching.backends.memcached.MemcachedCache', + 'BACKEND': 'caching.backends.memcached.PyLibMCCache', 'LOCATION': 'localhost:11211', }, } diff --git a/tests/test_cache.py b/tests/test_cache.py index acbdb97..1408842 100644 --- a/tests/test_cache.py +++ b/tests/test_cache.py @@ -460,8 +460,8 @@ def test_invalidate_new_object(self): eq_([a.val for a in u.addon_set.all()], [42, 17]) def test_make_key_unicode(self): - translation.activate(u('en-US')) - f = 'fragment\xe9\x9b\xbb\xe8\x85\xa6\xe7\x8e' + translation.activate('en-US') + f = 'fragment\xe9\x9b\xbb\xe8\x85\xa6\xe7\x8e\xa6' # This would crash with a unicode error. 
base.make_key(f, with_locale=True) translation.deactivate() From 408b8378a7fcbd211a9a9051064e6909c2768acc Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Fri, 3 Jul 2015 01:28:12 -0400 Subject: [PATCH 103/214] debugging for Travis import failures --- tests/test_cache.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/tests/test_cache.py b/tests/test_cache.py index 1408842..ca7bd1c 100644 --- a/tests/test_cache.py +++ b/tests/test_cache.py @@ -7,6 +7,13 @@ import mock from nose.tools import eq_ +# debugging Travis import failures +import os +import sys +sys.stderr.write(str(os.environ['PYTHONPATH'].split(os.pathsep)) + '\n\n') +from .testapp.models import Addon, User +import caching + from caching import base, invalidation from caching.compat import u From 92de7e10d28cd7b9bbb42c85356b3b90266675b7 Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Fri, 3 Jul 2015 01:46:03 -0400 Subject: [PATCH 104/214] remove 2to3 from setup.py (code runs on both Python 2 and 3 without it) --- setup.py | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/setup.py b/setup.py index 5a28120..5287164 100644 --- a/setup.py +++ b/setup.py @@ -1,11 +1,7 @@ from setuptools import setup -import sys import caching -extra = {} -if sys.version_info >= (3,): - extra['use_2to3'] = True setup( name='django-cache-machine', @@ -31,6 +27,5 @@ 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Software Development :: Libraries :: Python Modules', - ], - **extra + ] ) From 6a22fd671a5b9e19cb38939fe5df907f146c5771 Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Fri, 3 Jul 2015 01:47:15 -0400 Subject: [PATCH 105/214] Revert "debugging for Travis import failures" This reverts commit 408b8378a7fcbd211a9a9051064e6909c2768acc. 
--- tests/test_cache.py | 7 ------- 1 file changed, 7 deletions(-) diff --git a/tests/test_cache.py b/tests/test_cache.py index ca7bd1c..1408842 100644 --- a/tests/test_cache.py +++ b/tests/test_cache.py @@ -7,13 +7,6 @@ import mock from nose.tools import eq_ -# debugging Travis import failures -import os -import sys -sys.stderr.write(str(os.environ['PYTHONPATH'].split(os.pathsep)) + '\n\n') -from .testapp.models import Addon, User -import caching - from caching import base, invalidation from caching.compat import u From 3d4460bfbf777fa38a4350b91b34a5039aaf86a4 Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Fri, 3 Jul 2015 02:03:24 -0400 Subject: [PATCH 106/214] add separate requirements file for Python 3 since there is no version of python-memcached that supports both Python 2 and 3 yet --- .travis.yml | 2 +- README.rst | 2 +- examples/cache_machine/custom_backend.py | 2 +- examples/cache_machine/settings.py | 2 +- requirements.txt => requirements/base.txt | 2 -- requirements/py2.txt | 2 ++ requirements/py3.txt | 2 ++ 7 files changed, 8 insertions(+), 6 deletions(-) rename requirements.txt => requirements/base.txt (80%) create mode 100644 requirements/py2.txt create mode 100644 requirements/py3.txt diff --git a/.travis.yml b/.travis.yml index d3e4199..59f9abb 100644 --- a/.travis.yml +++ b/.travis.yml @@ -9,7 +9,7 @@ python: - "3.4" install: - pip install -e . - - pip install -r requirements.txt "$DJANGO_SPEC" + - pip install -r requirements/py`echo $TRAVIS_PYTHON_VERSION|cut -d'.' 
-f1`.txt "$DJANGO_SPEC" - pip install coveralls script: - python run_tests.py diff --git a/README.rst b/README.rst index 1992e05..4737fc0 100644 --- a/README.rst +++ b/README.rst @@ -43,5 +43,5 @@ Get it from `github Date: Fri, 3 Jul 2015 02:23:10 -0400 Subject: [PATCH 107/214] clean up diff for py3k branch (remove unnecessary changes) --- caching/base.py | 2 +- caching/invalidation.py | 4 +++- run_tests.py | 1 - tests/test_cache.py | 4 ++-- 4 files changed, 6 insertions(+), 5 deletions(-) diff --git a/caching/base.py b/caching/base.py index d37358a..1e022ba 100644 --- a/caching/base.py +++ b/caching/base.py @@ -89,7 +89,7 @@ def query_key(self): master), throwing a Django ValueError in the process. Django prevents cross DB model saving among related objects. """ - query_db_string = u('qs:{0}::db:{1}').format(self.query_string, self.db) + query_db_string = u('qs:%s::db:%s') % (self.query_string, self.db) return make_key(query_db_string, with_locale=False) def __iter__(self): diff --git a/caching/invalidation.py b/caching/invalidation.py index 8818341..eb6a021 100644 --- a/caching/invalidation.py +++ b/caching/invalidation.py @@ -39,7 +39,7 @@ def make_key(k, with_locale=True): """Generate the full key for ``k``, with a prefix.""" - key = '%s:%s' % (CACHE_PREFIX, encoding.smart_text(k)) + key = encoding.smart_text('%s:%s' % (CACHE_PREFIX, k)) if with_locale: key += encoding.smart_text(translation.get_language()) # memcached keys must be < 250 bytes and w/o whitespace, but it's nice @@ -172,6 +172,8 @@ def add_to_flush_list(self, mapping): pipe = redis.pipeline(transaction=False) for key, list_ in list(mapping.items()): for query_key in list_: + # Redis happily accepts unicode, but returns byte strings, + # so manually encode and decode the keys on the flush list here pipe.sadd(self.safe_key(key), query_key.encode('utf-8')) pipe.execute() diff --git a/run_tests.py b/run_tests.py index 326962f..372a5d2 100644 --- a/run_tests.py +++ b/run_tests.py @@ -1,4 +1,3 @@ 
-#!/usr/bin/env python """ Creating standalone Django apps is a PITA because you're not in a project, so you don't have a settings.py file. I can never remember to define diff --git a/tests/test_cache.py b/tests/test_cache.py index 1408842..d37e961 100644 --- a/tests/test_cache.py +++ b/tests/test_cache.py @@ -342,8 +342,8 @@ def f(): eq_(base.cached_with([], f, 'key'), 1) def test_cached_with_unicode(self): - ustr = u('\\u05ea\\u05d9\\u05d0\\u05d5\\u05e8 \\u05d0\\u05d5\\u05e1\\u05e3') - ustr = ':'.join(map(encoding.smart_str, [ustr])) + ustr = encoding.smart_bytes(u('\\u05ea\\u05d9\\u05d0\\u05d5\\u05e8 ' + '\\u05d0\\u05d5\\u05e1\\u05e3')) obj = mock.Mock() obj.query_key.return_value = u('xxx') obj.flush_key.return_value = 'key' From 6842c6d3610ab74a6e996fb60d623488ce186881 Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Fri, 3 Jul 2015 02:37:04 -0400 Subject: [PATCH 108/214] get rid of last remaining references to smart_str, which behaves differently on Python 2 and 3 --- caching/base.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/caching/base.py b/caching/base.py index 1e022ba..dbe9c03 100644 --- a/caching/base.py +++ b/caching/base.py @@ -302,10 +302,10 @@ def cached_with(obj, f, f_key, timeout=DEFAULT_TIMEOUT): obj_key = (obj.query_key() if hasattr(obj, 'query_key') else obj.cache_key) except (AttributeError, EmptyResultSet): - log.warning(u('%r cannot be cached.' % encoding.smart_str(obj))) + log.warning(u('%r cannot be cached.') % encoding.smart_text(obj)) return f() - key = '%s:%s' % tuple(map(encoding.smart_str, (f_key, obj_key))) + key = u('%s:%s') % tuple(map(encoding.smart_text, (f_key, obj_key))) # Put the key generated in cached() into this object's flush list. 
invalidator.add_to_flush_list( {obj.flush_key(): [_function_cache_key(key)]}) From ce2ade522aab9051db34436467e8176564aa542a Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Fri, 3 Jul 2015 03:21:34 -0400 Subject: [PATCH 109/214] version bump and update release notes for 0.8.1 release --- caching/__init__.py | 4 ++-- docs/releases.rst | 13 ++++++++++--- 2 files changed, 12 insertions(+), 5 deletions(-) diff --git a/caching/__init__.py b/caching/__init__.py index 00a5e09..ca9d612 100644 --- a/caching/__init__.py +++ b/caching/__init__.py @@ -1,2 +1,2 @@ -VERSION = (0, '8') -__version__ = '.'.join(map(str, VERSION)) +VERSION = ('0', '8', '1') +__version__ = '.'.join(VERSION) diff --git a/docs/releases.rst b/docs/releases.rst index fcea53d..024118e 100644 --- a/docs/releases.rst +++ b/docs/releases.rst @@ -3,13 +3,20 @@ Release Notes ================== -v0.8.1 (release date TBD) --------------------------------------- +v0.9 (release date TBD) +----------------------- + +- Support for Python 3 + +v0.8.1 (2015-07-03) +----------------------- This release is primarily aimed at adding support for more recent versions of Django and catching up on recent contributions. 
-- Allow test suite to run under Django 1.7 and Django 1.8 +- Support for Django 1.7 and Django 1.8 +- Fix bug in parsing of ``REDIS_BACKEND`` URI +- Miscellaneous bug fixes and documentation corrections Backwards Incompatible Changes ________________________________ From b5370a34e8142610cfdc15a51175c160b2e97ef8 Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Fri, 3 Jul 2015 12:06:26 -0400 Subject: [PATCH 110/214] allow running tests without coverage.py and with only a single settings file to speed up test runs during development --- .travis.yml | 2 +- run_tests.py | 26 +++++++++++++++++++------- 2 files changed, 20 insertions(+), 8 deletions(-) diff --git a/.travis.yml b/.travis.yml index ebc0c26..6845b66 100644 --- a/.travis.yml +++ b/.travis.yml @@ -10,7 +10,7 @@ install: - pip install -r requirements.txt "$DJANGO_SPEC" - pip install coveralls script: - - python run_tests.py + - python run_tests.py --with-coverage - flake8 . after_success: - coveralls diff --git a/run_tests.py b/run_tests.py index 372a5d2..6e7da33 100644 --- a/run_tests.py +++ b/run_tests.py @@ -6,6 +6,7 @@ """ import os import sys +import argparse from subprocess import call try: @@ -33,18 +34,29 @@ def main(): + parser = argparse.ArgumentParser(description='Process some integers.') + parser.add_argument('--with-coverage', action='store_true', + help='Run tests with coverage.py and display coverage report') + parser.add_argument('--settings', choices=SETTINGS, + help='Run tests only for the specified settings file') + args = parser.parse_args() + settings = args.settings and [args.settings] or SETTINGS results = [] django_admin = check_output(['which', 'django-admin.py']).strip() - for i, settings in enumerate(SETTINGS): - print('Running tests for: %s' % settings) - os.environ['DJANGO_SETTINGS_MODULE'] = 'cache_machine.%s' % settings + for i, settings_module in enumerate(settings): + print('Running tests for: %s' % settings_module) + os.environ['DJANGO_SETTINGS_MODULE'] = 'cache_machine.%s' 
% settings_module # append to the existing coverage data for all but the first run - if i > 0: - test_cmd = ['coverage', 'run', '--append', django_admin, 'test'] + if args.with_coverage and i > 0: + test_cmd = ['coverage', 'run', '--append'] + elif args.with_coverage: + test_cmd = ['coverage', 'run'] else: - test_cmd = ['coverage', 'run', django_admin, 'test'] + test_cmd = [] + test_cmd += [django_admin, 'test'] results.append(call(test_cmd)) - results.append(call(['coverage', 'report', '-m', '--fail-under', '70'])) + if args.with_coverage: + results.append(call(['coverage', 'report', '-m', '--fail-under', '70'])) sys.exit(any(results) and 1 or 0) From 3a0884d52c9edf597988426d15732f5c7b4540e3 Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Fri, 3 Jul 2015 12:10:37 -0400 Subject: [PATCH 111/214] add top level help text for argparse --- run_tests.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/run_tests.py b/run_tests.py index 6e7da33..6bea3fb 100644 --- a/run_tests.py +++ b/run_tests.py @@ -34,7 +34,9 @@ def main(): - parser = argparse.ArgumentParser(description='Process some integers.') + parser = argparse.ArgumentParser(description='Run the tests for django-cache-machine. ' + 'If no options are specified, tests will be run with ' + 'all settings files and without coverage.py.') parser.add_argument('--with-coverage', action='store_true', help='Run tests with coverage.py and display coverage report') parser.add_argument('--settings', choices=SETTINGS, From af921024671be1c6492fa64fcaabb6f579105c7a Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Fri, 3 Jul 2015 12:22:26 -0400 Subject: [PATCH 112/214] fix typo in link to pylibmc --- docs/index.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/index.rst b/docs/index.rst index af01cb6..c7c9564 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -63,7 +63,7 @@ options simply define a separate ``cache_machine`` entry for the .. 
note:: Cache Machine also supports the other memcache backend support by - Django >= 1.4 based on pylibmbc_: + Django >= 1.4 based on pylibmc_: ``caching.backends.memcached.PyLibMCCache``. .. _pylibmc: http://sendapatch.se/projects/pylibmc/ From faf33ec975dfb1b765bb16431a24ee1f04a82436 Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Fri, 3 Jul 2015 13:07:31 -0400 Subject: [PATCH 113/214] add regression test for issue #14 --- tests/test_cache.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/tests/test_cache.py b/tests/test_cache.py index c7c491a..a033219 100644 --- a/tests/test_cache.py +++ b/tests/test_cache.py @@ -434,6 +434,10 @@ def test_empty_in(self): # Raised an exception before fixing #2. eq_([], list(User.objects.filter(pk__in=[]))) + def test_empty_in_count(self): + # Regression test for #14. + eq_(0, User.objects.filter(pk__in=[]).count()) + def test_empty_queryset(self): for k in (1, 1): with self.assertNumQueries(k): From 6c5ceb0635364e099a346d65c7db58dc0bfc5593 Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Fri, 3 Jul 2015 15:34:23 -0400 Subject: [PATCH 114/214] remove "needs maintainer" notice from README.rst --- README.rst | 2 -- 1 file changed, 2 deletions(-) diff --git a/README.rst b/README.rst index 1992e05..ca5917e 100644 --- a/README.rst +++ b/README.rst @@ -2,8 +2,6 @@ Cache Machine ============= -`This project needs a maintainer. `_ - Cache Machine provides automatic caching and invalidation for Django models through the ORM. From 4df7f607fc8d99bc0d0d1f1660d673c27c390619 Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Fri, 3 Jul 2015 15:42:03 -0400 Subject: [PATCH 115/214] update README.rst with support Python and Django versions --- README.rst | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/README.rst b/README.rst index ca5917e..75d770f 100644 --- a/README.rst +++ b/README.rst @@ -17,13 +17,13 @@ For full docs, see https://cache-machine.readthedocs.org/en/latest/. 
Requirements ------------ -Cache Machine requires Django 1.3+. It was written and tested on Python 2.6. +Cache Machine works with Django 1.4-1.8 and Python 2.6 and 2.7. Python 3.3 +and 3.4 support is in progress in the ``py3k`` branch. Installation ------------ - Get it from `pypi `_:: pip install django-cache-machine @@ -36,7 +36,6 @@ or `github `_:: Running Tests ------------- - Get it from `github `_:: git clone git://github.com/django-cache-machine/django-cache-machine.git From 518382abc5dfd14720d8fef78dfc47b9924830fd Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Fri, 3 Jul 2015 18:00:33 -0400 Subject: [PATCH 116/214] add documentation for using redis for flush lists. fixes #37 --- docs/index.rst | 21 ++++++++++++++++++++- 1 file changed, 20 insertions(+), 1 deletion(-) diff --git a/docs/index.rst b/docs/index.rst index c7c9564..57cdc74 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -215,7 +215,26 @@ If someone wants to write a template tag for Django templates, I'd love to add it. -Classes that May Interest You +Redis Support +------------- + +Cache Machine supports storing flush lists in Redis rather than memcached, which +is more efficient because Redis can manipulate the lists on the server side +rather than having to transfer the entire list back and forth for each +modification. + +To enable Redis support for Cache Machine, add the following to your settings +file, replacing ``localhost`` with the hostname of your Redis server:: + + CACHE_MACHINE_USE_REDIS = True + REDIS_BACKEND = 'redis://localhost:6379' + +**Please note:** When using Redis, memcached is still used for caching model +objects, i.e., only the flush lists are stored in Redis. You still need to +configure ``CACHES`` the way you would normally for Cache Machine. + + +Classes That May Interest You ----------------------------- .. 
autoclass:: caching.base.CacheMachine From cf172651913c95d7b9af21faf25dd0aea0363bd2 Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Sat, 4 Jul 2015 11:34:41 -0400 Subject: [PATCH 117/214] force pip to install from source (workaround for bug in coverage.py installation that results in slow coverage run times) --- requirements.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/requirements.txt b/requirements.txt index cd7dbbb..debfe2a 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,5 @@ # These are the reqs to build docs and run tests. +--no-binary :all: # workaround for https://bitbucket.org/ned/coveragepy/issue/382/pip-install-coverage-uses-slower-pytracer sphinx mock django-nose From 8caa2b1df2ccd56c9dab406dd686299f26e73d46 Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Sat, 4 Jul 2015 11:45:43 -0400 Subject: [PATCH 118/214] put redis note in a note field in docs --- docs/index.rst | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/docs/index.rst b/docs/index.rst index 57cdc74..763c68c 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -229,9 +229,10 @@ file, replacing ``localhost`` with the hostname of your Redis server:: CACHE_MACHINE_USE_REDIS = True REDIS_BACKEND = 'redis://localhost:6379' -**Please note:** When using Redis, memcached is still used for caching model -objects, i.e., only the flush lists are stored in Redis. You still need to -configure ``CACHES`` the way you would normally for Cache Machine. +.. note:: + When using Redis, memcached is still used for caching model objects, i.e., + only the flush lists are stored in Redis. You still need to configure + ``CACHES`` the way you would normally for Cache Machine. 
Classes That May Interest You From 5914395dbed8286710da5f4a95872cb87aa526d2 Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Sat, 4 Jul 2015 11:57:21 -0400 Subject: [PATCH 119/214] make sure travis is using the latest pip --- .travis.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.travis.yml b/.travis.yml index 6845b66..5eb49d2 100644 --- a/.travis.yml +++ b/.travis.yml @@ -6,6 +6,7 @@ python: - "2.6" - "2.7" install: + - pip install -U pip # make sure we have the latest version - pip install -e . - pip install -r requirements.txt "$DJANGO_SPEC" - pip install coveralls From a2222284fa668c3fa81b246fa2d7106f90a03024 Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Sun, 5 Jul 2015 19:14:38 -0400 Subject: [PATCH 120/214] add support for invalidating all queries associated with a model when a new object is created. refs #6 --- caching/base.py | 54 +++++++++++++++++++++++++--------------- caching/config.py | 18 ++++++++++++++ caching/invalidation.py | 55 +++++++++++++++++++++++------------------ docs/index.rst | 13 ++++++++++ tests/test_cache.py | 43 ++++++++++++++++++++++++++------ 5 files changed, 131 insertions(+), 52 deletions(-) create mode 100644 caching/config.py diff --git a/caching/base.py b/caching/base.py index 1449269..ed7640c 100644 --- a/caching/base.py +++ b/caching/base.py @@ -8,6 +8,7 @@ from django.db.models.sql import query, EmptyResultSet from django.utils import encoding +from caching import config from .compat import DEFAULT_TIMEOUT from .invalidation import invalidator, flush_key, make_key, byid, cache @@ -21,12 +22,6 @@ def emit(self, record): log = logging.getLogger('caching') log.addHandler(NullHandler()) -NO_CACHE = -1 -CACHE_PREFIX = getattr(settings, 'CACHE_PREFIX', '') -FETCH_BY_ID = getattr(settings, 'FETCH_BY_ID', False) -CACHE_EMPTY_QUERYSETS = getattr(settings, 'CACHE_EMPTY_QUERYSETS', False) -TIMEOUT = getattr(settings, 'CACHE_COUNT_TIMEOUT', NO_CACHE) - class CachingManager(models.Manager): @@ -45,14 +40,23 @@ def 
contribute_to_class(self, cls, name): return super(CachingManager, self).contribute_to_class(cls, name) def post_save(self, instance, **kwargs): - self.invalidate(instance) + self.invalidate(instance, is_new_instance=kwargs['created'], + model_cls=kwargs['sender']) def post_delete(self, instance, **kwargs): self.invalidate(instance) - def invalidate(self, *objects): + def invalidate(self, *objects, **kwargs): """Invalidate all the flush lists associated with ``objects``.""" keys = [k for o in objects for k in o._cache_keys()] + # If whole-model invalidation on create is enabled, include this model's + # key in the list to be invalidated. Note that the key itself won't + # contain anything in the cache, but its corresponding flush key will. + is_new_instance = kwargs.pop('is_new_instance', False) + model_cls = kwargs.pop('model_cls', None) + if config.CACHE_INVALIDATE_ON_CREATE == config.WHOLE_MODEL and \ + is_new_instance and model_cls and hasattr(model_cls, 'model_key'): + keys.append(model_cls.model_key()) invalidator.invalidate_keys(keys) def raw(self, raw_query, params=None, *args, **kwargs): @@ -63,7 +67,7 @@ def cache(self, timeout=DEFAULT_TIMEOUT): return self.get_queryset().cache(timeout) def no_cache(self): - return self.cache(NO_CACHE) + return self.cache(config.NO_CACHE) class CacheMachine(object): @@ -74,7 +78,8 @@ class CacheMachine(object): called to get an iterator over some database results. 
""" - def __init__(self, query_string, iter_function, timeout=DEFAULT_TIMEOUT, db='default'): + def __init__(self, model, query_string, iter_function, timeout=DEFAULT_TIMEOUT, db='default'): + self.model = model self.query_string = query_string self.iter_function = iter_function self.timeout = timeout @@ -118,7 +123,7 @@ def __iter__(self): to_cache.append(obj) yield obj except StopIteration: - if to_cache or CACHE_EMPTY_QUERYSETS: + if to_cache or config.CACHE_EMPTY_QUERYSETS: self.cache_objects(to_cache) raise @@ -127,7 +132,7 @@ def cache_objects(self, objects): query_key = self.query_key() query_flush = flush_key(self.query_string) cache.add(query_key, objects, timeout=self.timeout) - invalidator.cache_objects(objects, query_key, query_flush) + invalidator.cache_objects(self.model, objects, query_key, query_flush) class CachingQuerySet(models.query.QuerySet): @@ -146,7 +151,7 @@ def query_key(self): def iterator(self): iterator = super(CachingQuerySet, self).iterator - if self.timeout == NO_CACHE: + if self.timeout == config.NO_CACHE: return iter(iterator()) else: try: @@ -154,9 +159,9 @@ def iterator(self): query_string = self.query_key() except query.EmptyResultSet: return iterator() - if FETCH_BY_ID: + if config.FETCH_BY_ID: iterator = self.fetch_by_id - return iter(CacheMachine(query_string, iterator, self.timeout, db=self.db)) + return iter(CacheMachine(self.model, query_string, iterator, self.timeout, db=self.db)) def fetch_by_id(self): """ @@ -208,10 +213,10 @@ def count(self): query_string = 'count:%s' % self.query_key() except query.EmptyResultSet: return 0 - if self.timeout == NO_CACHE or TIMEOUT == NO_CACHE: + if self.timeout == config.NO_CACHE or config.TIMEOUT == config.NO_CACHE: return super_count() else: - return cached_with(self, super_count, query_string, TIMEOUT) + return cached_with(self, super_count, query_string, config.TIMEOUT) def cache(self, timeout=DEFAULT_TIMEOUT): qs = self._clone() @@ -219,7 +224,7 @@ def cache(self, 
timeout=DEFAULT_TIMEOUT): return qs def no_cache(self): - return self.cache(NO_CACHE) + return self.cache(config.NO_CACHE) def _clone(self, *args, **kw): qs = super(CachingQuerySet, self)._clone(*args, **kw) @@ -238,6 +243,15 @@ def cache_key(self): """Return a cache key based on the object's primary key.""" return self._cache_key(self.pk, self._state.db) + @classmethod + def model_key(cls): + """ + Return a cache key for the entire model (used by invalidation). + """ + # use dummy PK and DB reference that will never resolve to an actual + # cache key for an object + return cls._cache_key('all-pks', 'all-dbs') + @classmethod def _cache_key(cls, pk, db): """ @@ -267,13 +281,13 @@ def __init__(self, *args, **kw): def __iter__(self): iterator = super(CachingRawQuerySet, self).__iter__ - if self.timeout == NO_CACHE: + if self.timeout == config.NO_CACHE: iterator = iterator() while True: yield iterator.next() else: sql = self.raw_query % tuple(self.params) - for obj in CacheMachine(sql, iterator, timeout=self.timeout): + for obj in CacheMachine(self.model, sql, iterator, timeout=self.timeout): yield obj raise StopIteration diff --git a/caching/config.py b/caching/config.py new file mode 100644 index 0000000..99e3925 --- /dev/null +++ b/caching/config.py @@ -0,0 +1,18 @@ +from django.conf import settings + +NO_CACHE = -1 +WHOLE_MODEL = 'whole-model' + +CACHE_PREFIX = getattr(settings, 'CACHE_PREFIX', '') +FETCH_BY_ID = getattr(settings, 'FETCH_BY_ID', False) +FLUSH = CACHE_PREFIX + ':flush:' +CACHE_EMPTY_QUERYSETS = getattr(settings, 'CACHE_EMPTY_QUERYSETS', False) +TIMEOUT = getattr(settings, 'CACHE_COUNT_TIMEOUT', NO_CACHE) +CACHE_INVALIDATE_ON_CREATE = getattr(settings, 'CACHE_INVALIDATE_ON_CREATE', None) +CACHE_MACHINE_NO_INVALIDATION = getattr(settings, 'CACHE_MACHINE_NO_INVALIDATION', False) +CACHE_MACHINE_USE_REDIS = getattr(settings, 'CACHE_MACHINE_USE_REDIS', False) + +_invalidate_on_create_values = (None, WHOLE_MODEL) +if CACHE_INVALIDATE_ON_CREATE not in 
_invalidate_on_create_values: + raise ValueError('CACHE_INVALIDATE_ON_CREATE must be one of: ' + '%s' % _invalidate_on_create_values) diff --git a/caching/invalidation.py b/caching/invalidation.py index e352ef7..1e2963e 100644 --- a/caching/invalidation.py +++ b/caching/invalidation.py @@ -27,17 +27,14 @@ except (InvalidCacheBackendError, ValueError): cache = default_cache - -CACHE_PREFIX = getattr(settings, 'CACHE_PREFIX', '') -FETCH_BY_ID = getattr(settings, 'FETCH_BY_ID', False) -FLUSH = CACHE_PREFIX + ':flush:' +from caching import config log = logging.getLogger('caching.invalidation') def make_key(k, with_locale=True): """Generate the full key for ``k``, with a prefix.""" - key = encoding.smart_str('%s:%s' % (CACHE_PREFIX, k)) + key = encoding.smart_str('%s:%s' % (config.CACHE_PREFIX, k)) if with_locale: key += encoding.smart_str(translation.get_language()) # memcached keys must be < 250 bytes and w/o whitespace, but it's nice @@ -48,7 +45,7 @@ def make_key(k, with_locale=True): def flush_key(obj): """We put flush lists in the flush: namespace.""" key = obj if isinstance(obj, basestring) else obj.cache_key - return FLUSH + make_key(key, with_locale=False) + return config.FLUSH + make_key(key, with_locale=False) def byid(obj): @@ -84,14 +81,15 @@ def invalidate_keys(self, keys): """Invalidate all the flush lists named by the list of ``keys``.""" if not keys: return - flush, flush_keys = self.find_flush_lists(keys) - - if flush: - cache.delete_many(flush) + obj_keys, flush_keys = self.find_flush_lists(keys) + if obj_keys: + log.debug('obj_keys: %s' % obj_keys) + cache.delete_many(obj_keys) if flush_keys: + log.debug('flush_keys: %s' % flush_keys) self.clear_flush_lists(flush_keys) - def cache_objects(self, objects, query_key, query_flush): + def cache_objects(self, model, objects, query_key, query_flush): # Add this query to the flush list of each object. 
We include # query_flush so that other things can be cached against the queryset # and still participate in invalidation. @@ -99,16 +97,21 @@ def cache_objects(self, objects, query_key, query_flush): flush_lists = collections.defaultdict(set) for key in flush_keys: + log.debug('adding %s to %s' % (query_flush, key)) flush_lists[key].add(query_flush) flush_lists[query_flush].add(query_key) - + # Add this query to the flush key for the entire model, if enabled + model_flush = flush_key(model.model_key()) + if config.CACHE_INVALIDATE_ON_CREATE == config.WHOLE_MODEL: + flush_lists[model_flush].add(query_key) # Add each object to the flush lists of its foreign keys. for obj in objects: obj_flush = obj.flush_key() for key in map(flush_key, obj._cache_keys()): - if key != obj_flush: + if key not in (obj_flush, model_flush): + log.debug('related: adding %s to %s' % (obj_flush, key)) flush_lists[key].add(obj_flush) - if FETCH_BY_ID: + if config.FETCH_BY_ID: flush_lists[key].add(byid(obj)) self.add_to_flush_list(flush_lists) @@ -119,20 +122,24 @@ def find_flush_lists(self, keys): The search starts with the lists in `keys` and expands to any flush lists found therein. Returns ({objects to flush}, {flush keys found}). """ - new_keys = keys = set(map(flush_key, keys)) - flush = set(keys) + objs = set(keys) + search_keys = keys = set(map(flush_key, keys)) # Add other flush keys from the lists, which happens when a parent # object includes a foreign key. 
while 1: - to_flush = self.get_flush_lists(new_keys) - flush.update(to_flush) - new_keys = set(k for k in to_flush if k.startswith(FLUSH)) - diff = new_keys.difference(keys) - if diff: + new_keys = set() + for key in self.get_flush_lists(search_keys): + if key.startswith(config.FLUSH): + new_keys.add(key) + else: + objs.add(key) + if new_keys: + log.debug('search for %s found keys %s' % (search_keys, new_keys)) keys.update(new_keys) + search_keys = new_keys else: - return flush, keys + return objs, keys def add_to_flush_list(self, mapping): """Update flush lists with the {flush_key: [query_key,...]} map.""" @@ -242,9 +249,9 @@ def get_redis_backend(): socket_timeout=socket_timeout) -if getattr(settings, 'CACHE_MACHINE_NO_INVALIDATION', False): +if config.CACHE_MACHINE_NO_INVALIDATION: invalidator = NullInvalidator() -elif getattr(settings, 'CACHE_MACHINE_USE_REDIS', False): +elif config.CACHE_MACHINE_USE_REDIS: redis = get_redis_backend() invalidator = RedisInvalidator() else: diff --git a/docs/index.rst b/docs/index.rst index 763c68c..e510813 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -90,6 +90,19 @@ By default cache machine will not cache empty querysets. To cache them:: CACHE_EMPTY_QUERYSETS = True +Object creation +^^^^^^^^^^^^^^^ + +By default Cache Machine does not invalidate queries when a new object is +created, because it can be expensive to maintain a flush list of all the +queries associated with a given table and cause significant disruption on +high-volume sites when *all* the queries for a particular model are +invalidated at once. 
If these are not issues for your site and immediate +inclusion of created objects in previously cached queries is desired, you +can enable this feature as follows:: + + CACHE_INVALIDATE_ON_CREATE = 'whole-model' + Cache Manager ------------- diff --git a/tests/test_cache.py b/tests/test_cache.py index a033219..797ccd9 100644 --- a/tests/test_cache.py +++ b/tests/test_cache.py @@ -8,7 +8,7 @@ import mock from nose.tools import eq_ -from caching import base, invalidation +from caching import base, invalidation, config cache = invalidation.cache @@ -38,12 +38,12 @@ class CachingTestCase(TestCase): def setUp(self): cache.clear() - self.old_timeout = base.TIMEOUT + self.old_timeout = config.TIMEOUT if getattr(settings, 'CACHE_MACHINE_USE_REDIS', False): invalidation.redis.flushall() def tearDown(self): - base.TIMEOUT = self.old_timeout + config.TIMEOUT = self.old_timeout def test_flush_key(self): """flush_key should work for objects or strings.""" @@ -166,7 +166,7 @@ def test_raw_cache_params(self): def test_raw_nocache(self, CacheMachine): base.TIMEOUT = 60 sql = 'SELECT * FROM %s WHERE id = 1' % Addon._meta.db_table - raw = list(Addon.objects.raw(sql, timeout=base.NO_CACHE)) + raw = list(Addon.objects.raw(sql, timeout=config.NO_CACHE)) eq_(len(raw), 1) raw_addon = raw[0] assert not hasattr(raw_addon, 'from_cache') @@ -174,13 +174,14 @@ def test_raw_nocache(self, CacheMachine): @mock.patch('caching.base.cache') def test_count_cache(self, cache_mock): - base.TIMEOUT = 60 + config.TIMEOUT = 60 cache_mock.scheme = 'memcached' cache_mock.get.return_value = None q = Addon.objects.all() q.count() + assert cache_mock.set.call_args, 'set not called' args, kwargs = cache_mock.set.call_args key, value, timeout = args eq_(value, 2) @@ -188,7 +189,7 @@ def test_count_cache(self, cache_mock): @mock.patch('caching.base.cached') def test_count_none_timeout(self, cached_mock): - base.TIMEOUT = base.NO_CACHE + config.TIMEOUT = config.NO_CACHE Addon.objects.count() 
eq_(cached_mock.call_count, 0) @@ -443,7 +444,7 @@ def test_empty_queryset(self): with self.assertNumQueries(k): eq_(len(Addon.objects.filter(pk=42)), 0) - @mock.patch('caching.base.CACHE_EMPTY_QUERYSETS', True) + @mock.patch('caching.config.CACHE_EMPTY_QUERYSETS', True) def test_cache_empty_queryset(self): for k in (1, 0): with self.assertNumQueries(k): @@ -455,7 +456,7 @@ def test_invalidate_empty_queryset(self): Addon.objects.create(val=42, author1=u, author2=u) eq_([a.val for a in u.addon_set.all()], [42]) - def test_invalidate_new_object(self): + def test_invalidate_new_related_object(self): u = User.objects.create() Addon.objects.create(val=42, author1=u, author2=u) eq_([a.val for a in u.addon_set.all()], [42]) @@ -501,3 +502,29 @@ def test_parse_backend_uri(self): host, params = parse_backend_uri(uri) self.assertEqual(host, '127.0.0.1:6379') self.assertEqual(params, {'socket_timeout': '5'}) + + @mock.patch('caching.config.CACHE_INVALIDATE_ON_CREATE', 'whole-model') + def test_invalidate_on_create_enabled(self): + """ Test that creating new objects invalidates cached queries for that model. """ + eq_([a.name for a in User.objects.all()], ['fliggy', 'clouseroo']) + User.objects.create(name='spam') + users = User.objects.all() + # our new user should show up and the query should not have come from the cache + eq_([a.name for a in users], ['fliggy', 'clouseroo', 'spam']) + assert not any([u.from_cache for u in users]) + # if we run it again, it should be cached this time + users = User.objects.all() + eq_([a.name for a in users], ['fliggy', 'clouseroo', 'spam']) + assert all([u.from_cache for u in User.objects.all()]) + + @mock.patch('caching.config.CACHE_INVALIDATE_ON_CREATE', None) + def test_invalidate_on_create_disabled(self): + """ + Test that creating new objects does NOT invalidate cached queries when + whole-model invalidation on create is disabled. 
+ """ + users = User.objects.all() + assert users, "Can't run this test without some users" + assert not any([u.from_cache for u in users]) + User.objects.create(name='spam') + assert all([u.from_cache for u in User.objects.all()]) From e5697f211d26c8c1886a22009b8956fd0e776fd9 Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Sun, 5 Jul 2015 21:45:45 -0400 Subject: [PATCH 121/214] correct flake8 errors --- caching/base.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/caching/base.py b/caching/base.py index ed7640c..5c661b3 100644 --- a/caching/base.py +++ b/caching/base.py @@ -2,7 +2,6 @@ import logging import django -from django.conf import settings from django.db import models from django.db.models import signals from django.db.models.sql import query, EmptyResultSet @@ -54,8 +53,8 @@ def invalidate(self, *objects, **kwargs): # contain anything in the cache, but its corresponding flush key will. is_new_instance = kwargs.pop('is_new_instance', False) model_cls = kwargs.pop('model_cls', None) - if config.CACHE_INVALIDATE_ON_CREATE == config.WHOLE_MODEL and \ - is_new_instance and model_cls and hasattr(model_cls, 'model_key'): + if (config.CACHE_INVALIDATE_ON_CREATE == config.WHOLE_MODEL and + is_new_instance and model_cls and hasattr(model_cls, 'model_key')): keys.append(model_cls.model_key()) invalidator.invalidate_keys(keys) From 13c9bfa8581179750e61e535e03e064cd68b3103 Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Sat, 11 Jul 2015 13:41:58 -0400 Subject: [PATCH 122/214] add tox.ini and pin mock to 1.0.1 (1.1+ not Python 2.6 compatible) --- .gitignore | 1 + requirements/base.txt | 2 +- tox.ini | 21 +++++++++++++++++++++ 3 files changed, 23 insertions(+), 1 deletion(-) create mode 100644 tox.ini diff --git a/.gitignore b/.gitignore index ea0208f..b346e5f 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,5 @@ .coverage +.tox docs/_build *.py[co] *.egg-info diff --git a/requirements/base.txt b/requirements/base.txt index 
1960eaa..decc024 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -1,7 +1,7 @@ # These are the reqs to build docs and run tests. --no-binary :all: # workaround for https://bitbucket.org/ned/coveragepy/issue/382/pip-install-coverage-uses-slower-pytracer sphinx -mock +mock==1.0.1 django-nose jinja2 redis diff --git a/tox.ini b/tox.ini new file mode 100644 index 0000000..b678d53 --- /dev/null +++ b/tox.ini @@ -0,0 +1,21 @@ +# Tox (http://tox.testrun.org/) is a tool for running tests +# in multiple virtualenvs. This configuration file will run the +# test suite on all supported python versions. To use it, "pip install tox" +# and then run "tox" from this directory. + +[tox] +envlist = + py26-dj{14,15,16} + py27-dj{14,15,16,17,18} + py{33,34}-dj{15,16,17,18} + +[testenv] +commands = {envpython} run_tests.py +deps = + py{26,27}: -rrequirements/py2.txt + py{33,34}: -rrequirements/py3.txt + dj14: Django>=1.4,<1.5 + dj15: Django>=1.5,<1.6 + dj16: Django>=1.6,<1.7 + dj17: Django>=1.7,<1.8 + dj18: Django>=1.8,<1.9 From 12b72fce415abc7e465043c3a4896206145ff116 Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Sat, 11 Jul 2015 13:43:55 -0400 Subject: [PATCH 123/214] swap preferred requirements file to encourage Python 3 adoption :) --- README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.rst b/README.rst index 75a5c13..5d6f984 100644 --- a/README.rst +++ b/README.rst @@ -40,5 +40,5 @@ Get it from `github Date: Sat, 11 Jul 2015 13:51:04 -0400 Subject: [PATCH 124/214] get rid of basestring_; use Django's copy of six library for string_types --- caching/compat.py | 4 +--- caching/invalidation.py | 8 +++----- 2 files changed, 4 insertions(+), 8 deletions(-) diff --git a/caching/compat.py b/caching/compat.py index 4261097..d248951 100644 --- a/caching/compat.py +++ b/caching/compat.py @@ -1,7 +1,7 @@ import sys import django -__all__ = ['DEFAULT_TIMEOUT', 'FOREVER', 'u', 'basestring_'] +__all__ = ['DEFAULT_TIMEOUT', 'FOREVER', 'u'] 
if django.VERSION[:2] >= (1, 6): @@ -17,8 +17,6 @@ def u(x): return codecs.unicode_escape_decode(x)[0] - basestring_ = basestring # flake8: noqa else: def u(x): return x - basestring_ = str diff --git a/caching/invalidation.py b/caching/invalidation.py index eb6a021..f505960 100644 --- a/caching/invalidation.py +++ b/caching/invalidation.py @@ -8,11 +8,9 @@ from django.conf import settings from django.core.cache import cache as default_cache from django.core.cache.backends.base import InvalidCacheBackendError -from django.utils import encoding, translation +from django.utils import encoding, translation, six from django.utils.six.moves.urllib.parse import parse_qsl -from .compat import basestring_ - try: import redis as redislib except ImportError: @@ -49,12 +47,12 @@ def make_key(k, with_locale=True): def flush_key(obj): """We put flush lists in the flush: namespace.""" - key = obj if isinstance(obj, basestring_) else obj.cache_key + key = obj if isinstance(obj, six.string_types) else obj.cache_key return FLUSH + make_key(key, with_locale=False) def byid(obj): - key = obj if isinstance(obj, basestring_) else obj.cache_key + key = obj if isinstance(obj, six.string_types) else obj.cache_key return make_key('byid:' + key) From 9d985d7fe62d674fb952c37fbb1cf8abae7a1fa6 Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Sat, 11 Jul 2015 13:59:11 -0400 Subject: [PATCH 125/214] restore & fix test_make_key_unicode --- caching/invalidation.py | 6 +++--- tests/test_cache.py | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/caching/invalidation.py b/caching/invalidation.py index f505960..f2c55d7 100644 --- a/caching/invalidation.py +++ b/caching/invalidation.py @@ -37,12 +37,12 @@ def make_key(k, with_locale=True): """Generate the full key for ``k``, with a prefix.""" - key = encoding.smart_text('%s:%s' % (CACHE_PREFIX, k)) + key = encoding.smart_bytes('%s:%s' % (CACHE_PREFIX, k)) if with_locale: - key += encoding.smart_text(translation.get_language()) + 
key += encoding.smart_bytes(translation.get_language()) # memcached keys must be < 250 bytes and w/o whitespace, but it's nice # to see the keys when using locmem. - return hashlib.md5(encoding.smart_bytes(key)).hexdigest() + return hashlib.md5(key).hexdigest() def flush_key(obj): diff --git a/tests/test_cache.py b/tests/test_cache.py index f42fe93..12dbae1 100644 --- a/tests/test_cache.py +++ b/tests/test_cache.py @@ -465,7 +465,7 @@ def test_invalidate_new_object(self): def test_make_key_unicode(self): translation.activate('en-US') - f = 'fragment\xe9\x9b\xbb\xe8\x85\xa6\xe7\x8e\xa6' + f = 'fragment\xe9\x9b\xbb\xe8\x85\xa6\xe7\x8e' # This would crash with a unicode error. base.make_key(f, with_locale=True) translation.deactivate() From 3f394243a1b79c631b002ff5cecb255abb6f4147 Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Sat, 11 Jul 2015 14:23:25 -0400 Subject: [PATCH 126/214] Add 'from __future__ import unicode_literals' to all modules. While not strictly required, it's still recommended; see https://docs.djangoproject.com/en/1.8/topics/python3/#unicode-literals. 
--- caching/__init__.py | 2 ++ caching/backends/locmem.py | 2 ++ caching/backends/memcached.py | 2 ++ caching/base.py | 10 ++++++---- caching/compat.py | 13 +++---------- caching/ext.py | 2 ++ caching/invalidation.py | 2 ++ tests/test_cache.py | 11 ++++++----- tests/testapp/models.py | 2 ++ 9 files changed, 27 insertions(+), 19 deletions(-) diff --git a/caching/__init__.py b/caching/__init__.py index ca9d612..35d8b9a 100644 --- a/caching/__init__.py +++ b/caching/__init__.py @@ -1,2 +1,4 @@ +from __future__ import unicode_literals + VERSION = ('0', '8', '1') __version__ = '.'.join(VERSION) diff --git a/caching/backends/locmem.py b/caching/backends/locmem.py index eb991bd..5276c93 100644 --- a/caching/backends/locmem.py +++ b/caching/backends/locmem.py @@ -1,3 +1,5 @@ +from __future__ import unicode_literals + import django from django.core.cache.backends import locmem diff --git a/caching/backends/memcached.py b/caching/backends/memcached.py index f81ff0e..f7eb6de 100644 --- a/caching/backends/memcached.py +++ b/caching/backends/memcached.py @@ -1,3 +1,5 @@ +from __future__ import unicode_literals + from django.core.cache.backends import memcached from caching.compat import DEFAULT_TIMEOUT diff --git a/caching/base.py b/caching/base.py index dbe9c03..938ccdb 100644 --- a/caching/base.py +++ b/caching/base.py @@ -1,3 +1,5 @@ +from __future__ import unicode_literals + import functools import logging @@ -8,7 +10,7 @@ from django.db.models.sql import query, EmptyResultSet from django.utils import encoding -from .compat import DEFAULT_TIMEOUT, u +from .compat import DEFAULT_TIMEOUT from .invalidation import invalidator, flush_key, make_key, byid, cache @@ -89,7 +91,7 @@ def query_key(self): master), throwing a Django ValueError in the process. Django prevents cross DB model saving among related objects. 
""" - query_db_string = u('qs:%s::db:%s') % (self.query_string, self.db) + query_db_string = 'qs:%s::db:%s' % (self.query_string, self.db) return make_key(query_db_string, with_locale=False) def __iter__(self): @@ -302,10 +304,10 @@ def cached_with(obj, f, f_key, timeout=DEFAULT_TIMEOUT): obj_key = (obj.query_key() if hasattr(obj, 'query_key') else obj.cache_key) except (AttributeError, EmptyResultSet): - log.warning(u('%r cannot be cached.') % encoding.smart_text(obj)) + log.warning('%r cannot be cached.' % encoding.smart_text(obj)) return f() - key = u('%s:%s') % tuple(map(encoding.smart_text, (f_key, obj_key))) + key = '%s:%s' % tuple(map(encoding.smart_text, (f_key, obj_key))) # Put the key generated in cached() into this object's flush list. invalidator.add_to_flush_list( {obj.flush_key(): [_function_cache_key(key)]}) diff --git a/caching/compat.py b/caching/compat.py index d248951..4cd2081 100644 --- a/caching/compat.py +++ b/caching/compat.py @@ -1,7 +1,9 @@ +from __future__ import unicode_literals + import sys import django -__all__ = ['DEFAULT_TIMEOUT', 'FOREVER', 'u'] +__all__ = ['DEFAULT_TIMEOUT', 'FOREVER'] if django.VERSION[:2] >= (1, 6): @@ -11,12 +13,3 @@ else: DEFAULT_TIMEOUT = None FOREVER = 0 - -if sys.version_info < (3,): - import codecs - - def u(x): - return codecs.unicode_escape_decode(x)[0] -else: - def u(x): - return x diff --git a/caching/ext.py b/caching/ext.py index b8b33bc..acdc226 100644 --- a/caching/ext.py +++ b/caching/ext.py @@ -1,3 +1,5 @@ +from __future__ import unicode_literals + from django.conf import settings from django.utils import encoding diff --git a/caching/invalidation.py b/caching/invalidation.py index f2c55d7..b88c534 100644 --- a/caching/invalidation.py +++ b/caching/invalidation.py @@ -1,3 +1,5 @@ +from __future__ import unicode_literals + import collections import functools import hashlib diff --git a/tests/test_cache.py b/tests/test_cache.py index 12dbae1..4284eb1 100644 --- a/tests/test_cache.py +++ 
b/tests/test_cache.py @@ -1,3 +1,5 @@ +from __future__ import unicode_literals + import django from django.conf import settings from django.test import TestCase @@ -8,7 +10,6 @@ from nose.tools import eq_ from caching import base, invalidation -from caching.compat import u cache = invalidation.cache @@ -342,10 +343,10 @@ def f(): eq_(base.cached_with([], f, 'key'), 1) def test_cached_with_unicode(self): - ustr = encoding.smart_bytes(u('\\u05ea\\u05d9\\u05d0\\u05d5\\u05e8 ' - '\\u05d0\\u05d5\\u05e1\\u05e3')) + ustr = encoding.smart_bytes('\\u05ea\\u05d9\\u05d0\\u05d5\\u05e8 ' + '\\u05d0\\u05d5\\u05e1\\u05e3') obj = mock.Mock() - obj.query_key.return_value = u('xxx') + obj.query_key.return_value = 'xxx' obj.flush_key.return_value = 'key' f = lambda: 1 eq_(base.cached_with(obj, f, 'adf:%s' % ustr), 1) @@ -429,7 +430,7 @@ def test_cache_machine_timeout(self, cache): eq_(kwargs, {'timeout': 12}) def test_unicode_key(self): - list(User.objects.filter(name=u('\\xfcmla\\xfct'))) + list(User.objects.filter(name='\\xfcmla\\xfct')) def test_empty_in(self): # Raised an exception before fixing #2. 
diff --git a/tests/testapp/models.py b/tests/testapp/models.py index 2e88db6..7b2705c 100644 --- a/tests/testapp/models.py +++ b/tests/testapp/models.py @@ -1,3 +1,5 @@ +from __future__ import unicode_literals + from django.db import models import mock From 2caa699de5c42c7f89811862e0fa88f0b933521b Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Sat, 11 Jul 2015 14:24:00 -0400 Subject: [PATCH 127/214] add list trove classifiers for Python 3 to setup.py --- setup.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 5287164..d0b6afa 100644 --- a/setup.py +++ b/setup.py @@ -25,7 +25,12 @@ 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', - 'Programming Language :: Python', + 'Programming Language :: Python :: 2', + 'Programming Language :: Python :: 2.6', + 'Programming Language :: Python :: 2.7', + 'Programming Language :: Python :: 3', + 'Programming Language :: Python :: 3.3', + 'Programming Language :: Python :: 3.4', 'Topic :: Software Development :: Libraries :: Python Modules', ] ) From 6be9d6ba26ffb917816dd21cc373b5727755d1be Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Sat, 11 Jul 2015 14:26:35 -0400 Subject: [PATCH 128/214] remove unused import --- caching/compat.py | 1 - 1 file changed, 1 deletion(-) diff --git a/caching/compat.py b/caching/compat.py index 4cd2081..cd1731a 100644 --- a/caching/compat.py +++ b/caching/compat.py @@ -1,6 +1,5 @@ from __future__ import unicode_literals -import sys import django __all__ = ['DEFAULT_TIMEOUT', 'FOREVER'] From 63aee884f9b5764af573fa9d6d2efaa7fc0015ed Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Sat, 11 Jul 2015 14:46:31 -0400 Subject: [PATCH 129/214] use Travis' container-based architecture --- .travis.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.travis.yml b/.travis.yml index 7305499..43026d5 100644 --- a/.travis.yml +++ b/.travis.yml @@ -33,3 +33,5 @@ matrix: env: 
DJANGO_SPEC="Django>=1.4,<1.5" - python: "3.4" env: DJANGO_SPEC="Django>=1.4,<1.5" +# Adding sudo: False tells Travis to use their container-based infrastructure, which is somewhat faster. +sudo: False From a1f9632059a0660d7a1bccfa43e1a5a96c1e5490 Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Sat, 11 Jul 2015 14:48:11 -0400 Subject: [PATCH 130/214] add tox build envs for docs + flake8 --- tox.ini | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/tox.ini b/tox.ini index b678d53..4ebbced 100644 --- a/tox.ini +++ b/tox.ini @@ -19,3 +19,21 @@ deps = dj16: Django>=1.6,<1.7 dj17: Django>=1.7,<1.8 dj18: Django>=1.8,<1.9 + +[testenv:docs] +basepython = python2.7 +deps = + Sphinx + Django +changedir = docs +commands = /usr/bin/make html + +[testenv:py27-flake8] +basepython = python2.7 +deps = flake8 +commands = flake8 + +[testenv:py34-flake8] +basepython = python3.4 +deps = flake8 +commands = flake8 From a73e7d5105c2066e24d152234c564b20dd524f75 Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Sat, 11 Jul 2015 14:54:46 -0400 Subject: [PATCH 131/214] make sure flake8 and docs tox envs run by default; add env vars so docs build doesn't raise warnings --- tox.ini | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/tox.ini b/tox.ini index 4ebbced..d82f261 100644 --- a/tox.ini +++ b/tox.ini @@ -8,6 +8,8 @@ envlist = py26-dj{14,15,16} py27-dj{14,15,16,17,18} py{33,34}-dj{15,16,17,18} + py{27,34}-flake8 + docs [testenv] commands = {envpython} run_tests.py @@ -25,6 +27,9 @@ basepython = python2.7 deps = Sphinx Django +setenv = + PYTHONPATH = {toxinidir}/examples/ + DJANGO_SETTINGS_MODULE = cache_machine.settings changedir = docs commands = /usr/bin/make html From 47c1c636d32941e1499c6af25e1761e80c530f85 Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Sat, 11 Jul 2015 14:55:17 -0400 Subject: [PATCH 132/214] use mock library included with Python 3.3+ if available --- requirements/base.txt | 1 - requirements/py2.txt | 1 + tests/test_cache.py | 13 
++++++++----- tests/testapp/models.py | 6 +++++- 4 files changed, 14 insertions(+), 7 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index decc024..deaff42 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -1,7 +1,6 @@ # These are the reqs to build docs and run tests. --no-binary :all: # workaround for https://bitbucket.org/ned/coveragepy/issue/382/pip-install-coverage-uses-slower-pytracer sphinx -mock==1.0.1 django-nose jinja2 redis diff --git a/requirements/py2.txt b/requirements/py2.txt index a9f6804..2d36793 100644 --- a/requirements/py2.txt +++ b/requirements/py2.txt @@ -1,2 +1,3 @@ -r base.txt python-memcached +mock==1.0.1 diff --git a/tests/test_cache.py b/tests/test_cache.py index 4284eb1..f422f79 100644 --- a/tests/test_cache.py +++ b/tests/test_cache.py @@ -1,20 +1,23 @@ from __future__ import unicode_literals import django +import jinja2 + from django.conf import settings from django.test import TestCase -from django.utils import translation, encoding +from django.utils import translation, encoding, six -import jinja2 -import mock +if six.PY3: + from unittest import mock +else: + import mock from nose.tools import eq_ from caching import base, invalidation +from .testapp.models import Addon, User cache = invalidation.cache -from .testapp.models import Addon, User - if django.get_version().startswith('1.3'): class settings_patch(object): def __init__(self, **kwargs): diff --git a/tests/testapp/models.py b/tests/testapp/models.py index 7b2705c..93bc5cf 100644 --- a/tests/testapp/models.py +++ b/tests/testapp/models.py @@ -1,8 +1,12 @@ from __future__ import unicode_literals from django.db import models +from django.utils import six -import mock +if six.PY3: + from unittest import mock +else: + import mock from caching.base import CachingMixin, CachingManager, cached_method From c89d55f51e826dbdabf719f50af27b718dd385c8 Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Sat, 11 Jul 2015 15:04:49 -0400 Subject: [PATCH 
133/214] clean up diff (remove unnecessary change) --- tests/test_cache.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/test_cache.py b/tests/test_cache.py index f422f79..59c18c0 100644 --- a/tests/test_cache.py +++ b/tests/test_cache.py @@ -346,13 +346,13 @@ def f(): eq_(base.cached_with([], f, 'key'), 1) def test_cached_with_unicode(self): - ustr = encoding.smart_bytes('\\u05ea\\u05d9\\u05d0\\u05d5\\u05e8 ' - '\\u05d0\\u05d5\\u05e1\\u05e3') + u = encoding.smart_bytes('\\u05ea\\u05d9\\u05d0\\u05d5\\u05e8 ' + '\\u05d0\\u05d5\\u05e1\\u05e3') obj = mock.Mock() obj.query_key.return_value = 'xxx' obj.flush_key.return_value = 'key' f = lambda: 1 - eq_(base.cached_with(obj, f, 'adf:%s' % ustr), 1) + eq_(base.cached_with(obj, f, 'adf:%s' % u), 1) def test_cached_method(self): a = Addon.objects.get(id=1) From 5b6ace8fd1e943c960975db4860f354e875c2448 Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Sat, 11 Jul 2015 15:05:24 -0400 Subject: [PATCH 134/214] explain why we don't run tox in .travis.yml --- .travis.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.travis.yml b/.travis.yml index 43026d5..876c545 100644 --- a/.travis.yml +++ b/.travis.yml @@ -2,6 +2,9 @@ language: python services: - memcached - redis-server +# Use Travis' build matrix and exclude functions rather than running tox +# directly so that we can run the builds in parallel and get coverage reports +# for each Python/Django version combo python: - "2.6" - "2.7" From b9a074228c8c7300a1c31f7e321e131e6c1ec376 Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Sat, 11 Jul 2015 17:31:02 -0400 Subject: [PATCH 135/214] update README with current supported Python versions --- README.rst | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/README.rst b/README.rst index 5d6f984..5a30567 100644 --- a/README.rst +++ b/README.rst @@ -17,8 +17,7 @@ For full docs, see https://cache-machine.readthedocs.org/en/latest/. 
Requirements ------------ -Cache Machine works with Django 1.4-1.8 and Python 2.6 and 2.7. Python 3.3 -and 3.4 support is in progress in the ``py3k`` branch. +Cache Machine works with Django 1.4-1.8 and Python 2.6, 2.7, 3.3 and 3.4. Installation From 0fe943a51f5e63e3a8749b172eb174880069997b Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Sat, 11 Jul 2015 17:32:43 -0400 Subject: [PATCH 136/214] bump version to 0.9.0.dev1 --- caching/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/caching/__init__.py b/caching/__init__.py index 35d8b9a..4f35f62 100644 --- a/caching/__init__.py +++ b/caching/__init__.py @@ -1,4 +1,4 @@ from __future__ import unicode_literals -VERSION = ('0', '8', '1') +VERSION = ('0', '9', '0', 'dev1') __version__ = '.'.join(VERSION) From 96d7ffadd5ccada90e3fe0324854597e9f3a8ba5 Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Wed, 29 Jul 2015 21:58:50 -0400 Subject: [PATCH 137/214] update release notes for 0.9 release --- docs/index.rst | 2 ++ docs/releases.rst | 7 +++++-- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/docs/index.rst b/docs/index.rst index e510813..28eb041 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -90,6 +90,8 @@ By default cache machine will not cache empty querysets. To cache them:: CACHE_EMPTY_QUERYSETS = True +.. _object-creation: + Object creation ^^^^^^^^^^^^^^^ diff --git a/docs/releases.rst b/docs/releases.rst index 024118e..ab9b05c 100644 --- a/docs/releases.rst +++ b/docs/releases.rst @@ -3,10 +3,13 @@ Release Notes ================== -v0.9 (release date TBD) ------------------------ +v0.9 (2015-07-29) +----------------- - Support for Python 3 +- A new setting, ``CACHE_INVALIDATE_ON_CREATE``, which facilitates invalidation + when a new model object is created. For more information, see + :ref:`object-creation`. 
v0.8.1 (2015-07-03) ----------------------- From aa877d1cb42d2454d2a8e2beea9a9a03dbacfe36 Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Wed, 29 Jul 2015 21:59:54 -0400 Subject: [PATCH 138/214] bump version --- caching/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/caching/__init__.py b/caching/__init__.py index 4f35f62..db90cdc 100644 --- a/caching/__init__.py +++ b/caching/__init__.py @@ -1,4 +1,4 @@ from __future__ import unicode_literals -VERSION = ('0', '9', '0', 'dev1') +VERSION = ('0', '9') __version__ = '.'.join(VERSION) From 83e3c839e66f0dbf8129db39b0b9dc01ff452373 Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Sun, 11 Oct 2015 14:13:41 -0400 Subject: [PATCH 139/214] safely pickle CachingQuerySet.timeout when set to DEFAULT_TIMEOUT --- caching/base.py | 20 ++++++++++++++++++++ tests/test_cache.py | 25 ++++++++++++++++++++++++- 2 files changed, 44 insertions(+), 1 deletion(-) diff --git a/caching/base.py b/caching/base.py index 28a515c..d6c23b1 100644 --- a/caching/base.py +++ b/caching/base.py @@ -138,10 +138,30 @@ def cache_objects(self, objects): class CachingQuerySet(models.query.QuerySet): + _default_timeout_pickle_key = '__DEFAULT_TIMEOUT__' + def __init__(self, *args, **kw): super(CachingQuerySet, self).__init__(*args, **kw) self.timeout = DEFAULT_TIMEOUT + def __getstate__(self): + """ + Safely pickle our timeout if it's a DEFAULT_TIMEOUT. This is not needed + by cache-machine itself, but by application code that may re-cache objects + retrieved using cache-machine. + """ + state = dict() + state.update(self.__dict__) + if self.timeout == DEFAULT_TIMEOUT: + state['timeout'] = self._default_timeout_pickle_key + return state + + def __setstate__(self, state): + """ Safely unpickle our timeout if it's a DEFAULT_TIMEOUT. 
""" + self.__dict__.update(state) + if self.timeout == self._default_timeout_pickle_key: + self.timeout = DEFAULT_TIMEOUT + def flush_key(self): return flush_key(self.query_key()) diff --git a/tests/test_cache.py b/tests/test_cache.py index 042eaf6..f14fcc6 100644 --- a/tests/test_cache.py +++ b/tests/test_cache.py @@ -2,6 +2,7 @@ import django import jinja2 +import pickle from django.conf import settings from django.test import TestCase @@ -13,7 +14,7 @@ import mock from nose.tools import eq_ -from caching import base, invalidation, config +from caching import base, invalidation, config, compat from .testapp.models import Addon, User cache = invalidation.cache @@ -533,3 +534,25 @@ def test_invalidate_on_create_disabled(self): assert not any([u.from_cache for u in users]) User.objects.create(name='spam') assert all([u.from_cache for u in User.objects.all()]) + + def test_pickle_queryset(self): + """ + Test for CacheingQuerySet.__getstate__ and CachingQuerySet.__setstate__. + """ + # Make sure CachingQuerySet.timeout, when set to DEFAULT_TIMEOUT, can be safely + # pickled/unpickled on/from different Python processes which may have different + # underlying values for DEFAULT_TIMEOUT: + q1 = Addon.objects.all() + assert q1.timeout == compat.DEFAULT_TIMEOUT + pickled = pickle.dumps(q1) + new_timeout = object() + with mock.patch('caching.base.DEFAULT_TIMEOUT', new_timeout): + q2 = pickle.loads(pickled) + assert q2.timeout == new_timeout + # Make sure values other than DEFAULT_TIMEOUT remain unaffected: + q1 = Addon.objects.cache(10).all() + assert q1.timeout == 10 + pickled = pickle.dumps(q1) + with mock.patch('caching.base.DEFAULT_TIMEOUT', new_timeout): + q2 = pickle.loads(pickled) + assert q2.timeout == 10 From d73ecefad1170a6e0d625dfc2f696b48750373c0 Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Sun, 11 Oct 2015 14:18:10 -0400 Subject: [PATCH 140/214] fix infinite caching for pre-Django 1.6; leave cache methods as-is for Django 1.6 and later --- 
caching/backends/memcached.py | 19 ++++++++++++++----- tests/test_cache.py | 14 +++++++++++++- 2 files changed, 27 insertions(+), 6 deletions(-) diff --git a/caching/backends/memcached.py b/caching/backends/memcached.py index f7eb6de..f08f9bf 100644 --- a/caching/backends/memcached.py +++ b/caching/backends/memcached.py @@ -1,18 +1,27 @@ from __future__ import unicode_literals +import django from django.core.cache.backends import memcached from caching.compat import DEFAULT_TIMEOUT -# Add infinite timeout support to the memcached backend. +# Add infinite timeout support to the memcached backend, if needed. class InfinityMixin(object): - def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None): - return super(InfinityMixin, self).add(key, value, timeout, version) + if django.VERSION[:2] < (1, 6): + # Django 1.6 and later do it the right way already + def _get_memcache_timeout(self, timeout): + if timeout == 0: + return timeout + else: + return super(InfinityMixin, self)._get_memcache_timeout(timeout) - def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None): - return super(InfinityMixin, self).set(key, value, timeout, version) + def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None): + return super(InfinityMixin, self).add(key, value, timeout, version) + + def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None): + return super(InfinityMixin, self).set(key, value, timeout, version) class MemcachedCache(InfinityMixin, memcached.MemcachedCache): diff --git a/tests/test_cache.py b/tests/test_cache.py index 042eaf6..e8f32df 100644 --- a/tests/test_cache.py +++ b/tests/test_cache.py @@ -12,8 +12,9 @@ else: import mock from nose.tools import eq_ +from nose.plugins.skip import SkipTest -from caching import base, invalidation, config +from caching import base, invalidation, config, compat from .testapp.models import Addon, User cache = invalidation.cache @@ -404,6 +405,17 @@ def test_timeout_from_queryset(self): assert hasattr(a, 
'from_cache') eq_(a.id, 1) + @mock.patch('memcache.Client.set') + def test_infinite_timeout(self, mock_set): + """ + Test that memcached infinite timeouts work with all Django versions. + """ + if not any(['memcache' in c['BACKEND'] for c in settings.CACHES.values()]): + raise SkipTest('This test requires that Django use memcache') + cache.set('foo', 'bar', timeout=compat.FOREVER) + # for memcached, 0 timeout means store forever + mock_set.assert_called_with(':1:foo', 'bar', 0) + def test_cache_and_no_cache(self): """Whatever happens last sticks.""" q = Addon.objects.no_cache().cache(12).filter(id=1) From 7465507abb1a3ff9a7180e87a234a15348806bde Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Mon, 12 Oct 2015 11:34:50 -0400 Subject: [PATCH 141/214] add test showing failed invalidation when using database replication --- .travis.yml | 4 ++ examples/cache_machine/settings.py | 11 +++-- tests/test_cache.py | 67 ++++++++++++++++++++---------- tests/testapp/models.py | 4 ++ 4 files changed, 61 insertions(+), 25 deletions(-) diff --git a/.travis.yml b/.travis.yml index 876c545..ab3c6bd 100644 --- a/.travis.yml +++ b/.travis.yml @@ -10,6 +10,10 @@ python: - "2.7" - "3.3" - "3.4" +addons: + postgresql: "9.4" +before_script: + - psql -c 'create database travis_ci_test;' -U postgres install: - pip install -U pip # make sure we have the latest version - pip install -e . 
diff --git a/examples/cache_machine/settings.py b/examples/cache_machine/settings.py index 33f0e7a..946eff3 100644 --- a/examples/cache_machine/settings.py +++ b/examples/cache_machine/settings.py @@ -1,3 +1,5 @@ +import os + CACHES = { 'default': { 'BACKEND': 'caching.backends.memcached.MemcachedCache', @@ -9,12 +11,13 @@ DATABASES = { 'default': { - 'NAME': ':memory:', - 'ENGINE': 'django.db.backends.sqlite3', + 'NAME': os.environ.get('TRAVIS') and 'travis_ci_test' or 'cache_machine_devel', + 'ENGINE': 'django.db.backends.postgresql_psycopg2', }, 'slave': { - 'NAME': 'test_slave.db', - 'ENGINE': 'django.db.backends.sqlite3', + 'NAME': 'cache_machine_devel_slave', + 'ENGINE': 'django.db.backends.postgresql_psycopg2', + 'TEST_MIRROR': 'default', # support older Django syntax for now }, } diff --git a/tests/test_cache.py b/tests/test_cache.py index f14fcc6..ed24fbe 100644 --- a/tests/test_cache.py +++ b/tests/test_cache.py @@ -3,9 +3,10 @@ import django import jinja2 import pickle +import logging from django.conf import settings -from django.test import TestCase +from django.test import TestCase, TransactionTestCase from django.utils import translation, encoding, six if six.PY3: @@ -18,6 +19,7 @@ from .testapp.models import Addon, User cache = invalidation.cache +log = logging.getLogger(__name__) if django.get_version().startswith('1.3'): class settings_patch(object): @@ -37,7 +39,6 @@ def __exit__(self, *args): class CachingTestCase(TestCase): - multi_db = True fixtures = ['tests/testapp/fixtures/testapp/test_cache.json'] extra_apps = ['tests.testapp'] @@ -482,25 +483,6 @@ def test_get_flush_lists_none(self, cache_mock): cache_mock.return_value.values.return_value = [None, [1]] eq_(base.invalidator.get_flush_lists(None), set([1])) - def test_multidb_cache(self): - """ Test where master and slave DB result in two different cache keys """ - assert Addon.objects.get(id=1).from_cache is False - assert Addon.objects.get(id=1).from_cache is True - - from_slave = 
Addon.objects.using('slave').get(id=1) - assert from_slave.from_cache is False - assert from_slave._state.db == 'slave' - - def test_multidb_fetch_by_id(self): - """ Test where master and slave DB result in two different cache keys with FETCH_BY_ID""" - with self.settings(FETCH_BY_ID=True): - assert Addon.objects.get(id=1).from_cache is False - assert Addon.objects.get(id=1).from_cache is True - - from_slave = Addon.objects.using('slave').get(id=1) - assert from_slave.from_cache is False - assert from_slave._state.db == 'slave' - def test_parse_backend_uri(self): """ Test that parse_backend_uri works as intended. Regression for #92. """ from caching.invalidation import parse_backend_uri @@ -556,3 +538,46 @@ def test_pickle_queryset(self): with mock.patch('caching.base.DEFAULT_TIMEOUT', new_timeout): q2 = pickle.loads(pickled) assert q2.timeout == 10 + + +# use TransactionTestCase so that ['TEST']['MIRROR'] setting works +# see https://code.djangoproject.com/ticket/23718 +class MultiDbTestCase(TransactionTestCase): + multi_db = True + fixtures = ['tests/testapp/fixtures/testapp/test_cache.json'] + extra_apps = ['tests.testapp'] + + def test_multidb_cache(self): + """ Test where master and slave DB result in two different cache keys """ + assert Addon.objects.get(id=1).from_cache is False + assert Addon.objects.get(id=1).from_cache is True + + from_slave = Addon.objects.using('slave').get(id=1) + assert from_slave.from_cache is False + assert from_slave._state.db == 'slave' + + def test_multidb_fetch_by_id(self): + """ Test where master and slave DB result in two different cache keys with FETCH_BY_ID""" + with self.settings(FETCH_BY_ID=True): + assert Addon.objects.get(id=1).from_cache is False + assert Addon.objects.get(id=1).from_cache is True + + from_slave = Addon.objects.using('slave').get(id=1) + assert from_slave.from_cache is False + assert from_slave._state.db == 'slave' + + def test_multidb_master_slave_invalidation(self): + """ Test saving an object on one 
DB invalidates it for all DBs """ + log.debug('priming the DB & cache') + master_obj = User.objects.using('default').create(name='new-test-user') + slave_obj = User.objects.using('slave').get(name='new-test-user') + assert slave_obj.from_cache is False + log.debug('deleting the original object') + User.objects.using('default').filter(pk=slave_obj.pk).delete() + log.debug('re-creating record with a new primary key') + master_obj = User.objects.using('default').create(name='new-test-user') + log.debug('attempting to force re-fetch from DB (should not use cache)') + slave_obj = User.objects.using('slave').get(name='new-test-user') + assert slave_obj.from_cache is False + eq_(slave_obj.pk, master_obj.pk) + assert False diff --git a/tests/testapp/models.py b/tests/testapp/models.py index 93bc5cf..37fbdc6 100644 --- a/tests/testapp/models.py +++ b/tests/testapp/models.py @@ -28,6 +28,10 @@ class Addon(CachingMixin, models.Model): objects = CachingManager() + class Meta: + # without this, Postgres & SQLite return objects in different orders: + ordering = ('pk',) + @cached_method def calls(self, arg=1): """This is a docstring for calls()""" From 6b9ae06eea708a26c6f4022c4575716ce866550b Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Mon, 12 Oct 2015 11:37:25 -0400 Subject: [PATCH 142/214] add psycopg2 to requirements --- requirements/base.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/requirements/base.txt b/requirements/base.txt index deaff42..8fc8122 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -6,3 +6,4 @@ jinja2 redis flake8 coverage +psycopg2 From 48b3320042da1bbf50dc8d48fb34f172f805d792 Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Sat, 17 Oct 2015 09:37:59 -0400 Subject: [PATCH 143/214] point Travis status to master branch --- README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.rst b/README.rst index 5a30567..e78b4f8 100644 --- a/README.rst +++ b/README.rst @@ -7,7 +7,7 @@ through the ORM. 
For full docs, see https://cache-machine.readthedocs.org/en/latest/. -.. image:: https://travis-ci.org/django-cache-machine/django-cache-machine.png +.. image:: https://travis-ci.org/django-cache-machine/django-cache-machine.svg?branch=master :target: https://travis-ci.org/django-cache-machine/django-cache-machine .. image:: https://coveralls.io/repos/django-cache-machine/django-cache-machine/badge.svg?branch=master From d82d12fad0d3c98ad5e88d69414553adb0dad7df Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Sat, 17 Oct 2015 11:57:15 -0400 Subject: [PATCH 144/214] don't include db key in flush key; supports separate cache keys per DB while allowing invalidation of both master & slave cached objects when only one is modified or deleted --- caching/base.py | 51 ++++++++++++++++++++--------------------- caching/invalidation.py | 39 +++++++++++++++++++------------ tests/test_cache.py | 3 +-- 3 files changed, 50 insertions(+), 43 deletions(-) diff --git a/caching/base.py b/caching/base.py index d6c23b1..b6ddba1 100644 --- a/caching/base.py +++ b/caching/base.py @@ -49,16 +49,7 @@ def post_delete(self, instance, **kwargs): def invalidate(self, *objects, **kwargs): """Invalidate all the flush lists associated with ``objects``.""" - keys = [k for o in objects for k in o._cache_keys()] - # If whole-model invalidation on create is enabled, include this model's - # key in the list to be invalidated. Note that the key itself won't - # contain anything in the cache, but its corresponding flush key will. 
- is_new_instance = kwargs.pop('is_new_instance', False) - model_cls = kwargs.pop('model_cls', None) - if (config.CACHE_INVALIDATE_ON_CREATE == config.WHOLE_MODEL and - is_new_instance and model_cls and hasattr(model_cls, 'model_key')): - keys.append(model_cls.model_key()) - invalidator.invalidate_keys(keys) + invalidator.invalidate_objects(objects, **kwargs) def raw(self, raw_query, params=None, *args, **kwargs): return CachingRawQuerySet(raw_query, self.model, params=params, @@ -107,7 +98,7 @@ def __iter__(self): # Try to fetch from the cache. cached = cache.get(query_key) if cached is not None: - log.debug('cache hit: %s' % self.query_string) + log.debug('cache hit: %s' % query_key) for obj in cached: obj.from_cache = True yield obj @@ -125,13 +116,14 @@ def __iter__(self): yield obj except StopIteration: if to_cache or config.CACHE_EMPTY_QUERYSETS: - self.cache_objects(to_cache) + self.cache_objects(to_cache, query_key) raise - def cache_objects(self, objects): + def cache_objects(self, objects, query_key): """Cache query_key => objects, then update the flush lists.""" - query_key = self.query_key() + log.debug('query_key: %s' % query_key) query_flush = flush_key(self.query_string) + log.debug('query_flush: %s' % query_flush) cache.add(query_key, objects, timeout=self.timeout) invalidator.cache_objects(self.model, objects, query_key, query_flush) @@ -259,38 +251,45 @@ class CachingMixin(object): def flush_key(self): return flush_key(self) - @property - def cache_key(self): + def get_cache_key(self, incl_db=True): """Return a cache key based on the object's primary key.""" - return self._cache_key(self.pk, self._state.db) + return self._cache_key(self.pk, incl_db and self._state.db or None) + cache_key = property(get_cache_key) @classmethod - def model_key(cls): + def model_flush_key(cls): """ Return a cache key for the entire model (used by invalidation). 
""" # use dummy PK and DB reference that will never resolve to an actual - # cache key for an objection - return cls._cache_key('all-pks', 'all-dbs') + # cache key for an object + return flush_key(cls._cache_key('all-pks', 'all-dbs')) @classmethod - def _cache_key(cls, pk, db): + def _cache_key(cls, pk, db=None): """ Return a string that uniquely identifies the object. For the Addon class, with a pk of 2, we get "o:addons.addon:2". """ - key_parts = ('o', cls._meta, pk, db) + if db: + key_parts = ('o', cls._meta, pk, db) + else: + key_parts = ('o', cls._meta, pk) return ':'.join(map(encoding.smart_text, key_parts)) - def _cache_keys(self): + def _cache_keys(self, incl_db=True): """Return the cache key for self plus all related foreign keys.""" fks = dict((f, getattr(self, f.attname)) for f in self._meta.fields if isinstance(f, models.ForeignKey)) - - keys = [fk.rel.to._cache_key(val, self._state.db) for fk, val in list(fks.items()) + keys = [fk.rel.to._cache_key(val, incl_db and self._state.db or None) + for fk, val in list(fks.items()) if val is not None and hasattr(fk.rel.to, '_cache_key')] - return (self.cache_key,) + tuple(keys) + return (self.get_cache_key(incl_db=incl_db),) + tuple(keys) + + def _flush_keys(self): + """Return the flush key for self plus all related foreign keys.""" + return map(flush_key, self._cache_keys(incl_db=False)) class CachingRawQuerySet(models.query.RawQuerySet): diff --git a/caching/invalidation.py b/caching/invalidation.py index 08178a1..ee3aa28 100644 --- a/caching/invalidation.py +++ b/caching/invalidation.py @@ -46,7 +46,7 @@ def make_key(k, with_locale=True): def flush_key(obj): """We put flush lists in the flush: namespace.""" - key = obj if isinstance(obj, six.string_types) else obj.cache_key + key = obj if isinstance(obj, six.string_types) else obj.get_cache_key(incl_db=False) return config.FLUSH + make_key(key, with_locale=False) @@ -79,16 +79,24 @@ def wrapper(*args, **kw): class Invalidator(object): - def 
invalidate_keys(self, keys): - """Invalidate all the flush lists named by the list of ``keys``.""" - if not keys: + def invalidate_objects(self, objects, is_new_instance=False, model_cls=None): + """Invalidate all the flush lists for the given ``objects``.""" + obj_keys = [k for o in objects for k in o._cache_keys()] + flush_keys = [k for o in objects for k in o._flush_keys()] + # If whole-model invalidation on create is enabled, include this model's + # key in the list to be invalidated. Note that the key itself won't + # contain anything in the cache, but its corresponding flush key will. + if (config.CACHE_INVALIDATE_ON_CREATE == config.WHOLE_MODEL and + is_new_instance and model_cls and hasattr(model_cls, 'model_flush_key')): + flush_keys.append(model_cls.model_flush_key()) + if not obj_keys or not flush_keys: return - obj_keys, flush_keys = self.find_flush_lists(keys) + obj_keys, flush_keys = self.expand_flush_lists(obj_keys, flush_keys) if obj_keys: - log.debug('obj_keys: %s' % obj_keys) + log.debug('deleting object keys: %s' % obj_keys) cache.delete_many(obj_keys) if flush_keys: - log.debug('flush_keys: %s' % flush_keys) + log.debug('clearing flush lists: %s' % flush_keys) self.clear_flush_lists(flush_keys) def cache_objects(self, model, objects, query_key, query_flush): @@ -103,13 +111,13 @@ def cache_objects(self, model, objects, query_key, query_flush): flush_lists[key].add(query_flush) flush_lists[query_flush].add(query_key) # Add this query to the flush key for the entire model, if enabled - model_flush = flush_key(model.model_key()) + model_flush = model.model_flush_key() if config.CACHE_INVALIDATE_ON_CREATE == config.WHOLE_MODEL: flush_lists[model_flush].add(query_key) # Add each object to the flush lists of its foreign keys. 
for obj in objects: obj_flush = obj.flush_key() - for key in map(flush_key, obj._cache_keys()): + for key in obj._flush_keys(): if key not in (obj_flush, model_flush): log.debug('related: adding %s to %s' % (obj_flush, key)) flush_lists[key].add(obj_flush) @@ -117,15 +125,16 @@ def cache_objects(self, model, objects, query_key, query_flush): flush_lists[key].add(byid(obj)) self.add_to_flush_list(flush_lists) - def find_flush_lists(self, keys): + def expand_flush_lists(self, obj_keys, flush_keys): """ Recursively search for flush lists and objects to invalidate. The search starts with the lists in `keys` and expands to any flush lists found therein. Returns ({objects to flush}, {flush keys found}). """ - objs = set(keys) - search_keys = keys = set(map(flush_key, keys)) + log.debug('in expand_flush_lists') + obj_keys = set(obj_keys) + search_keys = flush_keys = set(flush_keys) # Add other flush keys from the lists, which happens when a parent # object includes a foreign key. @@ -135,13 +144,13 @@ def find_flush_lists(self, keys): if key.startswith(config.FLUSH): new_keys.add(key) else: - objs.add(key) + obj_keys.add(key) if new_keys: log.debug('search for %s found keys %s' % (search_keys, new_keys)) - keys.update(new_keys) + flush_keys.update(new_keys) search_keys = new_keys else: - return objs, keys + return obj_keys, flush_keys def add_to_flush_list(self, mapping): """Update flush lists with the {flush_key: [query_key,...]} map.""" diff --git a/tests/test_cache.py b/tests/test_cache.py index ed24fbe..e27cfc9 100644 --- a/tests/test_cache.py +++ b/tests/test_cache.py @@ -54,7 +54,7 @@ def tearDown(self): def test_flush_key(self): """flush_key should work for objects or strings.""" a = Addon.objects.get(id=1) - eq_(base.flush_key(a.cache_key), base.flush_key(a)) + eq_(base.flush_key(a.get_cache_key(incl_db=False)), base.flush_key(a)) def test_cache_key(self): a = Addon.objects.get(id=1) @@ -580,4 +580,3 @@ def test_multidb_master_slave_invalidation(self): slave_obj = 
User.objects.using('slave').get(name='new-test-user') assert slave_obj.from_cache is False eq_(slave_obj.pk, master_obj.pk) - assert False From c4ad945fc8070b5bb5476d0a59a6e4acf800e1e1 Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Sat, 17 Oct 2015 12:14:12 -0400 Subject: [PATCH 145/214] comment explaining incl_db --- caching/base.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/caching/base.py b/caching/base.py index b6ddba1..9e0e324 100644 --- a/caching/base.py +++ b/caching/base.py @@ -253,6 +253,9 @@ def flush_key(self): def get_cache_key(self, incl_db=True): """Return a cache key based on the object's primary key.""" + # incl_db will be False if this key is intended for use in a flush key. + # This ensures all cached copies of an object will be invalidated + # regardless of the DB on which they're modified/deleted. return self._cache_key(self.pk, incl_db and self._state.db or None) cache_key = property(get_cache_key) From bf470c29a3c1ffed6f4d6322b599a7d3891c73d6 Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Sat, 17 Oct 2015 12:22:13 -0400 Subject: [PATCH 146/214] add release notes for 0.9.1 (forthcoming) --- docs/releases.rst | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/docs/releases.rst b/docs/releases.rst index ab9b05c..4a926b5 100644 --- a/docs/releases.rst +++ b/docs/releases.rst @@ -3,6 +3,17 @@ Release Notes ================== +v0.9.1 (TBD) +----------------- + +- Fix bug that prevented objects retrieved via cache machine from being + re-cached by application code (see PR #103) +- Fix bug that prevented caching objects forever when using Django <= 1.5 + (see PR #104) +- Fix regression (introduced in 0.8) that broke invalidation when an object + was cached via a slave database and later modified or deleted via the + master database, when using master/slave replication (see PR #105) + v0.9 (2015-07-29) ----------------- From 9deaa72a11c77d3489d59b6ab0447854046ded39 Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Thu, 
22 Oct 2015 16:11:15 -0400 Subject: [PATCH 147/214] add invalidation tests for sharding use case --- .travis.yml | 1 + examples/cache_machine/settings.py | 11 ++++++++- tests/test_cache.py | 37 ++++++++++++++++++++++++++++++ 3 files changed, 48 insertions(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index ab3c6bd..1282ff8 100644 --- a/.travis.yml +++ b/.travis.yml @@ -14,6 +14,7 @@ addons: postgresql: "9.4" before_script: - psql -c 'create database travis_ci_test;' -U postgres + - psql -c 'create database travis_ci_test2;' -U postgres install: - pip install -U pip # make sure we have the latest version - pip install -e . diff --git a/examples/cache_machine/settings.py b/examples/cache_machine/settings.py index 946eff3..4cba276 100644 --- a/examples/cache_machine/settings.py +++ b/examples/cache_machine/settings.py @@ -15,10 +15,19 @@ 'ENGINE': 'django.db.backends.postgresql_psycopg2', }, 'slave': { - 'NAME': 'cache_machine_devel_slave', + 'NAME': 'cache_machine_devel', 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'TEST_MIRROR': 'default', # support older Django syntax for now }, + 'master2': { + 'NAME': os.environ.get('TRAVIS') and 'travis_ci_test2' or 'cache_machine_devel2', + 'ENGINE': 'django.db.backends.postgresql_psycopg2', + }, + 'slave2': { + 'NAME': 'cache_machine_devel2', + 'ENGINE': 'django.db.backends.postgresql_psycopg2', + 'TEST_MIRROR': 'master2', # support older Django syntax for now + }, } INSTALLED_APPS = ( diff --git a/tests/test_cache.py b/tests/test_cache.py index e27cfc9..30a1d7b 100644 --- a/tests/test_cache.py +++ b/tests/test_cache.py @@ -580,3 +580,40 @@ def test_multidb_master_slave_invalidation(self): slave_obj = User.objects.using('slave').get(name='new-test-user') assert slave_obj.from_cache is False eq_(slave_obj.pk, master_obj.pk) + + def test_multidb_no_db_crossover(self): + """ Test no crossover of objects with identical PKs """ + master_obj = User.objects.using('default').create(name='new-test-user') + 
master_obj2 = User.objects.using('master2').create(pk=master_obj.pk, name='other-test-user') + # prime the cache for the default DB + master_obj = User.objects.using('default').get(name='new-test-user') + assert master_obj.from_cache is False + master_obj = User.objects.using('default').get(name='new-test-user') + assert master_obj.from_cache is True + # prime the cache for the 2nd master DB + master_obj2 = User.objects.using('master2').get(name='other-test-user') + assert master_obj2.from_cache is False + master_obj2 = User.objects.using('master2').get(name='other-test-user') + assert master_obj2.from_cache is True + # ensure no crossover between databases + assert master_obj.name != master_obj2.name + + def test_multidb_sharding_no_invalidation(self): + """ Test for no invalidation when sharding w/distinct PKs""" + master_obj = User.objects.using('default').create(name='new-test-user') + # if pks are the same, objects *will* be invalidated across DBs + master_obj2 = User.objects.using('master2').create(pk=master_obj.pk+1, name='other-test-user') + # prime the cache for the default DB + master_obj = User.objects.using('default').get(name='new-test-user') + assert master_obj.from_cache is False + master_obj = User.objects.using('default').get(name='new-test-user') + assert master_obj.from_cache is True + # prime the cache for the 2nd master DB + master_obj2 = User.objects.using('master2').get(name='other-test-user') + assert master_obj2.from_cache is False + master_obj2 = User.objects.using('master2').get(name='other-test-user') + assert master_obj2.from_cache is True + # make sure master_obj2 query is not invalidated by a change to the 'default' db + master_obj.save() + master_obj2 = User.objects.using('master2').get(name='other-test-user') + assert master_obj2.from_cache is True From d4113683a6febefacaa92c113f444b9e4e3118b2 Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Thu, 22 Oct 2015 16:22:16 -0400 Subject: [PATCH 148/214] fix flake8 errors --- 
tests/test_cache.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/tests/test_cache.py b/tests/test_cache.py index 30a1d7b..d10b3e6 100644 --- a/tests/test_cache.py +++ b/tests/test_cache.py @@ -602,8 +602,9 @@ def test_multidb_sharding_no_invalidation(self): """ Test for no invalidation when sharding w/distinct PKs""" master_obj = User.objects.using('default').create(name='new-test-user') # if pks are the same, objects *will* be invalidated across DBs - master_obj2 = User.objects.using('master2').create(pk=master_obj.pk+1, name='other-test-user') - # prime the cache for the default DB + master_obj2 = User.objects.using('master2').create(pk=master_obj.pk+1, + name='other-test-user') + # prime the cache for the default DB master_obj = User.objects.using('default').get(name='new-test-user') assert master_obj.from_cache is False master_obj = User.objects.using('default').get(name='new-test-user') From ae871b4a096ace0d72692f2a45fca771c807ec12 Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Thu, 22 Oct 2015 17:19:32 -0400 Subject: [PATCH 149/214] remove pointless test (only way this would fail is if single object save flushed the whole cache, or there was some odd connection between completely unrelated objects that shared no attributes) --- tests/test_cache.py | 21 --------------------- 1 file changed, 21 deletions(-) diff --git a/tests/test_cache.py b/tests/test_cache.py index d10b3e6..4312759 100644 --- a/tests/test_cache.py +++ b/tests/test_cache.py @@ -597,24 +597,3 @@ def test_multidb_no_db_crossover(self): assert master_obj2.from_cache is True # ensure no crossover between databases assert master_obj.name != master_obj2.name - - def test_multidb_sharding_no_invalidation(self): - """ Test for no invalidation when sharding w/distinct PKs""" - master_obj = User.objects.using('default').create(name='new-test-user') - # if pks are the same, objects *will* be invalidated across DBs - master_obj2 = 
User.objects.using('master2').create(pk=master_obj.pk+1, - name='other-test-user') - # prime the cache for the default DB - master_obj = User.objects.using('default').get(name='new-test-user') - assert master_obj.from_cache is False - master_obj = User.objects.using('default').get(name='new-test-user') - assert master_obj.from_cache is True - # prime the cache for the 2nd master DB - master_obj2 = User.objects.using('master2').get(name='other-test-user') - assert master_obj2.from_cache is False - master_obj2 = User.objects.using('master2').get(name='other-test-user') - assert master_obj2.from_cache is True - # make sure master_obj2 query is not invalidated by a change to the 'default' db - master_obj.save() - master_obj2 = User.objects.using('master2').get(name='other-test-user') - assert master_obj2.from_cache is True From 909b43445e23b725e70d11b06c87fff4f942adde Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Thu, 22 Oct 2015 17:25:31 -0400 Subject: [PATCH 150/214] version bump; update release notes --- caching/__init__.py | 2 +- docs/releases.rst | 6 ++++-- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/caching/__init__.py b/caching/__init__.py index db90cdc..88bdfc2 100644 --- a/caching/__init__.py +++ b/caching/__init__.py @@ -1,4 +1,4 @@ from __future__ import unicode_literals -VERSION = ('0', '9') +VERSION = ('0', '9', '1') __version__ = '.'.join(VERSION) diff --git a/docs/releases.rst b/docs/releases.rst index 4a926b5..a047f62 100644 --- a/docs/releases.rst +++ b/docs/releases.rst @@ -3,7 +3,7 @@ Release Notes ================== -v0.9.1 (TBD) +v0.9.1 (2015-10-22) ----------------- - Fix bug that prevented objects retrieved via cache machine from being @@ -12,7 +12,9 @@ v0.9.1 (TBD) (see PR #104) - Fix regression (introduced in 0.8) that broke invalidation when an object was cached via a slave database and later modified or deleted via the - master database, when using master/slave replication (see PR #105) + master database, when using 
master/slave replication (see PR #105). Note + this change may cause unexpected invalidation when sharding across DBs + that share both a schema and primary key values or other attributes. v0.9 (2015-07-29) ----------------- From 14d808be30e02ca53fd0af0789789ae2e173451a Mon Sep 17 00:00:00 2001 From: Tim Dawborn Date: Wed, 10 Feb 2016 21:18:17 +1100 Subject: [PATCH 151/214] Convert raw `assert`'s to unittest framework self.assertX calls. Added fallback to unittest2 for Python 2.6. --- requirements/py2.txt | 1 + tests/test_cache.py | 295 ++++++++++++++++++++++--------------------- 2 files changed, 151 insertions(+), 145 deletions(-) diff --git a/requirements/py2.txt b/requirements/py2.txt index 2d36793..3bf2f67 100644 --- a/requirements/py2.txt +++ b/requirements/py2.txt @@ -1,3 +1,4 @@ -r base.txt python-memcached mock==1.0.1 +unittest2 diff --git a/tests/test_cache.py b/tests/test_cache.py index 408cd58..38ba4d8 100644 --- a/tests/test_cache.py +++ b/tests/test_cache.py @@ -1,24 +1,30 @@ from __future__ import unicode_literals - -import django -import jinja2 -import pickle import logging +import pickle +import sys + +if sys.version_info < (2, 7): + import unittest2 as unittest +else: + import unittest +import django from django.conf import settings from django.test import TestCase, TransactionTestCase -from django.utils import translation, encoding, six +from django.utils import translation, encoding -if six.PY3: +if sys.version_info >= (3, ): from unittest import mock else: import mock -from nose.tools import eq_ -from nose.plugins.skip import SkipTest + +import jinja2 from caching import base, invalidation, config, compat + from .testapp.models import Addon, User + cache = invalidation.cache log = logging.getLogger(__name__) @@ -55,129 +61,129 @@ def tearDown(self): def test_flush_key(self): """flush_key should work for objects or strings.""" a = Addon.objects.get(id=1) - eq_(base.flush_key(a.get_cache_key(incl_db=False)), base.flush_key(a)) + 
self.assertEqual(base.flush_key(a.get_cache_key(incl_db=False)), base.flush_key(a)) def test_cache_key(self): a = Addon.objects.get(id=1) - eq_(a.cache_key, 'o:testapp.addon:1:default') + self.assertEqual(a.cache_key, 'o:testapp.addon:1:default') keys = set((a.cache_key, a.author1.cache_key, a.author2.cache_key)) - eq_(set(a._cache_keys()), keys) + self.assertEqual(set(a._cache_keys()), keys) def test_cache(self): """Basic cache test: second get comes from cache.""" - assert Addon.objects.get(id=1).from_cache is False - assert Addon.objects.get(id=1).from_cache is True + self.assertIs(Addon.objects.get(id=1).from_cache, False) + self.assertIs(Addon.objects.get(id=1).from_cache, True) def test_filter_cache(self): - assert Addon.objects.filter(id=1)[0].from_cache is False - assert Addon.objects.filter(id=1)[0].from_cache is True + self.assertIs(Addon.objects.filter(id=1)[0].from_cache, False) + self.assertIs(Addon.objects.filter(id=1)[0].from_cache, True) def test_slice_cache(self): - assert Addon.objects.filter(id=1)[:1][0].from_cache is False - assert Addon.objects.filter(id=1)[:1][0].from_cache is True + self.assertIs(Addon.objects.filter(id=1)[:1][0].from_cache, False) + self.assertIs(Addon.objects.filter(id=1)[:1][0].from_cache, True) def test_invalidation(self): - assert Addon.objects.get(id=1).from_cache is False + self.assertIs(Addon.objects.get(id=1).from_cache, False) a = [x for x in Addon.objects.all() if x.id == 1][0] - assert a.from_cache is False + self.assertIs(a.from_cache, False) - assert Addon.objects.get(id=1).from_cache is True + self.assertIs(Addon.objects.get(id=1).from_cache, True) a = [x for x in Addon.objects.all() if x.id == 1][0] - assert a.from_cache is True + self.assertIs(a.from_cache, True) a.save() - assert Addon.objects.get(id=1).from_cache is False + self.assertIs(Addon.objects.get(id=1).from_cache, False) a = [x for x in Addon.objects.all() if x.id == 1][0] - assert a.from_cache is False + self.assertIs(a.from_cache, False) - assert 
Addon.objects.get(id=1).from_cache is True + self.assertIs(Addon.objects.get(id=1).from_cache, True) a = [x for x in Addon.objects.all() if x.id == 1][0] - assert a.from_cache is True + self.assertIs(a.from_cache, True) def test_invalidation_cross_locale(self): - assert Addon.objects.get(id=1).from_cache is False + self.assertIs(Addon.objects.get(id=1).from_cache, False) a = [x for x in Addon.objects.all() if x.id == 1][0] - assert a.from_cache is False + self.assertIs(a.from_cache, False) - assert Addon.objects.get(id=1).from_cache is True + self.assertIs(Addon.objects.get(id=1).from_cache, True) a = [x for x in Addon.objects.all() if x.id == 1][0] - assert a.from_cache is True + self.assertIs(a.from_cache, True) # Do query & invalidation in a different locale. old_locale = translation.get_language() translation.activate('fr') - assert Addon.objects.get(id=1).from_cache is True + self.assertIs(Addon.objects.get(id=1).from_cache, True) a = [x for x in Addon.objects.all() if x.id == 1][0] - assert a.from_cache is True + self.assertIs(a.from_cache, True) a.save() translation.activate(old_locale) - assert Addon.objects.get(id=1).from_cache is False + self.assertIs(Addon.objects.get(id=1).from_cache, False) a = [x for x in Addon.objects.all() if x.id == 1][0] - assert a.from_cache is False + self.assertIs(a.from_cache, False) def test_fk_invalidation(self): """When an object is invalidated, its foreign keys get invalidated.""" a = Addon.objects.get(id=1) - assert User.objects.get(name='clouseroo').from_cache is False + self.assertIs(User.objects.get(name='clouseroo').from_cache, False) a.save() - assert User.objects.get(name='clouseroo').from_cache is False + self.assertIs(User.objects.get(name='clouseroo').from_cache, False) def test_fk_parent_invalidation(self): """When a foreign key changes, any parent objects get invalidated.""" - assert Addon.objects.get(id=1).from_cache is False + self.assertIs(Addon.objects.get(id=1).from_cache, False) a = 
Addon.objects.get(id=1) - assert a.from_cache is True + self.assertIs(a.from_cache, True) u = User.objects.get(id=a.author1.id) - assert u.from_cache is True + self.assertIs(u.from_cache, True) u.name = 'fffuuu' u.save() - assert User.objects.get(id=a.author1.id).from_cache is False + self.assertIs(User.objects.get(id=a.author1.id).from_cache, False) a = Addon.objects.get(id=1) - assert a.from_cache is False - eq_(a.author1.name, 'fffuuu') + self.assertIs(a.from_cache, False) + self.assertEqual(a.author1.name, 'fffuuu') def test_raw_cache(self): sql = 'SELECT * FROM %s WHERE id = 1' % Addon._meta.db_table raw = list(Addon.objects.raw(sql)) - eq_(len(raw), 1) + self.assertEqual(len(raw), 1) raw_addon = raw[0] a = Addon.objects.get(id=1) for field in Addon._meta.fields: - eq_(getattr(a, field.name), getattr(raw_addon, field.name)) - assert raw_addon.from_cache is False + self.assertEqual(getattr(a, field.name), getattr(raw_addon, field.name)) + self.assertIs(raw_addon.from_cache, False) cached = list(Addon.objects.raw(sql)) - eq_(len(cached), 1) + self.assertEqual(len(cached), 1) cached_addon = cached[0] a = Addon.objects.get(id=1) for field in Addon._meta.fields: - eq_(getattr(a, field.name), getattr(cached_addon, field.name)) - assert cached_addon.from_cache is True + self.assertEqual(getattr(a, field.name), getattr(cached_addon, field.name)) + self.assertIs(cached_addon.from_cache, True) def test_raw_cache_params(self): """Make sure the query params are included in the cache key.""" sql = 'SELECT * from %s WHERE id = %%s' % Addon._meta.db_table raw = list(Addon.objects.raw(sql, [1]))[0] - eq_(raw.id, 1) + self.assertEqual(raw.id, 1) raw2 = list(Addon.objects.raw(sql, [2]))[0] - eq_(raw2.id, 2) + self.assertEqual(raw2.id, 2) @mock.patch('caching.base.CacheMachine') def test_raw_nocache(self, CacheMachine): base.TIMEOUT = 60 sql = 'SELECT * FROM %s WHERE id = 1' % Addon._meta.db_table raw = list(Addon.objects.raw(sql, timeout=config.NO_CACHE)) - eq_(len(raw), 1) + 
self.assertEqual(len(raw), 1) raw_addon = raw[0] - assert not hasattr(raw_addon, 'from_cache') - assert not CacheMachine.called + self.assertFalse(hasattr(raw_addon, 'from_cache')) + self.assertFalse(CacheMachine.called) @mock.patch('caching.base.cache') def test_count_cache(self, cache_mock): @@ -188,23 +194,23 @@ def test_count_cache(self, cache_mock): q = Addon.objects.all() q.count() - assert cache_mock.set.call_args, 'set not called' + self.assertTrue(cache_mock.set.call_args, 'set not called') args, kwargs = cache_mock.set.call_args key, value, timeout = args - eq_(value, 2) - eq_(timeout, 60) + self.assertEqual(value, 2) + self.assertEqual(timeout, 60) @mock.patch('caching.base.cached') def test_count_none_timeout(self, cached_mock): config.TIMEOUT = config.NO_CACHE Addon.objects.count() - eq_(cached_mock.call_count, 0) + self.assertEqual(cached_mock.call_count, 0) @mock.patch('caching.base.cached') def test_count_nocache(self, cached_mock): base.TIMEOUT = 60 Addon.objects.no_cache().count() - eq_(cached_mock.call_count, 0) + self.assertEqual(cached_mock.call_count, 0) def test_queryset_flush_list(self): """Check that we're making a flush list for the queryset.""" @@ -214,8 +220,8 @@ def test_queryset_flush_list(self): cache.set('remove-me', 15) Addon.objects.invalidate(objects[0]) - assert cache.get(q.flush_key()) is None - assert cache.get('remove-me') is None + self.assertIs(cache.get(q.flush_key()), None) + self.assertIs(cache.get('remove-me'), None) def test_jinja_cache_tag_queryset(self): env = jinja2.Environment(extensions=['caching.ext.cache']) @@ -224,7 +230,7 @@ def check(q, expected): t = env.from_string( "{% cache q %}{% for x in q %}{{ x.id }}:{{ x.val }};" "{% endfor %}{% endcache %}") - eq_(t.render(q=q), expected) + self.assertEqual(t.render(q=q), expected) # Get the template in cache, then hijack iterator to make sure we're # hitting the cached fragment. 
@@ -232,7 +238,7 @@ def check(q, expected): qs = Addon.objects.all() qs.iterator = mock.Mock() check(qs, '1:42;2:42;') - assert not qs.iterator.called + self.assertFalse(qs.iterator.called) # Make changes, make sure we dropped the cached fragment. a = Addon.objects.get(id=1) @@ -241,7 +247,7 @@ def check(q, expected): q = Addon.objects.all() cache.get(q.flush_key()) - assert cache.get(q.flush_key()) is None + self.assertIs(cache.get(q.flush_key()), None) check(Addon.objects.all(), '1:17;2:42;') qs = Addon.objects.all() @@ -255,7 +261,7 @@ def test_jinja_cache_tag_object(self): def check(obj, expected): t = env.from_string( '{% cache obj, 30 %}{{ obj.id }}:{{ obj.val }}{% endcache %}') - eq_(t.render(obj=obj), expected) + self.assertEqual(t.render(obj=obj), expected) check(addon, '1:42') addon.val = 17 @@ -270,7 +276,7 @@ def test_jinja_multiple_tags(self): def check(obj, expected): t = env.from_string(template) - eq_(t.render(obj=obj), expected) + self.assertEqual(t.render(obj=obj), expected) check(addon, '1\n42') addon.val = 17 @@ -286,7 +292,7 @@ def test_jinja_cache_tag_extra(self): def check(obj, expected): t = env.from_string(template) - eq_(t.render(obj=obj), expected) + self.assertEqual(t.render(obj=obj), expected) addon.key = 1 check(addon, '1:1') @@ -311,33 +317,33 @@ def expensive(): f = lambda: base.cached_with(a, expensive, 'key') # Only gets called once. - eq_(f(), 1) - eq_(f(), 1) + self.assertEqual(f(), 1) + self.assertEqual(f(), 1) # Switching locales does not reuse the cache. old_locale = translation.get_language() translation.activate('fr') - eq_(f(), 2) + self.assertEqual(f(), 2) # Called again after flush. a.save() - eq_(f(), 3) + self.assertEqual(f(), 3) translation.activate(old_locale) - eq_(f(), 4) + self.assertEqual(f(), 4) counter.reset_mock() q = Addon.objects.filter(id=1) f = lambda: base.cached_with(q, expensive, 'key') # Only gets called once. 
- eq_(f(), 1) - eq_(f(), 1) + self.assertEqual(f(), 1) + self.assertEqual(f(), 1) # Called again after flush. list(q)[0].save() - eq_(f(), 2) - eq_(f(), 2) + self.assertEqual(f(), 2) + self.assertEqual(f(), 2) def test_cached_with_bad_object(self): """cached_with shouldn't fail if the object is missing a cache key.""" @@ -347,7 +353,7 @@ def f(): counter() return counter.call_count - eq_(base.cached_with([], f, 'key'), 1) + self.assertEqual(base.cached_with([], f, 'key'), 1) def test_cached_with_unicode(self): u = encoding.smart_bytes('\\u05ea\\u05d9\\u05d0\\u05d5\\u05e8 ' @@ -356,64 +362,63 @@ def test_cached_with_unicode(self): obj.query_key.return_value = 'xxx' obj.flush_key.return_value = 'key' f = lambda: 1 - eq_(base.cached_with(obj, f, 'adf:%s' % u), 1) + self.assertEqual(base.cached_with(obj, f, 'adf:%s' % u), 1) def test_cached_method(self): a = Addon.objects.get(id=1) - eq_(a.calls(), (1, 1)) - eq_(a.calls(), (1, 1)) + self.assertEqual(a.calls(), (1, 1)) + self.assertEqual(a.calls(), (1, 1)) a.save() # Still returns 1 since the object has it's own local cache. - eq_(a.calls(), (1, 1)) - eq_(a.calls(3), (3, 2)) + self.assertEqual(a.calls(), (1, 1)) + self.assertEqual(a.calls(3), (3, 2)) a = Addon.objects.get(id=1) - eq_(a.calls(), (1, 3)) - eq_(a.calls(4), (4, 4)) - eq_(a.calls(3), (3, 2)) + self.assertEqual(a.calls(), (1, 3)) + self.assertEqual(a.calls(4), (4, 4)) + self.assertEqual(a.calls(3), (3, 2)) b = Addon.objects.create(id=5, val=32, author1_id=1, author2_id=2) - eq_(b.calls(), (1, 5)) + self.assertEqual(b.calls(), (1, 5)) # Make sure we're updating the wrapper's docstring. 
- eq_(b.calls.__doc__, Addon.calls.__doc__) + self.assertEqual(b.calls.__doc__, Addon.calls.__doc__) @mock.patch('caching.base.CacheMachine') def test_no_cache_from_manager(self, CacheMachine): a = Addon.objects.no_cache().get(id=1) - eq_(a.id, 1) - assert not hasattr(a, 'from_cache') - assert not CacheMachine.called + self.assertEqual(a.id, 1) + self.assertFalse(hasattr(a, 'from_cache')) + self.assertFalse(CacheMachine.called) @mock.patch('caching.base.CacheMachine') def test_no_cache_from_queryset(self, CacheMachine): a = Addon.objects.all().no_cache().get(id=1) - eq_(a.id, 1) - assert not hasattr(a, 'from_cache') - assert not CacheMachine.called + self.assertEqual(a.id, 1) + self.assertFalse(hasattr(a, 'from_cache')) + self.assertFalse(CacheMachine.called) def test_timeout_from_manager(self): q = Addon.objects.cache(12).filter(id=1) - eq_(q.timeout, 12) + self.assertEqual(q.timeout, 12) a = q.get() - assert hasattr(a, 'from_cache') - eq_(a.id, 1) + self.assertTrue(hasattr(a, 'from_cache')) + self.assertEqual(a.id, 1) def test_timeout_from_queryset(self): q = Addon.objects.all().cache(12).filter(id=1) - eq_(q.timeout, 12) + self.assertEqual(q.timeout, 12) a = q.get() - assert hasattr(a, 'from_cache') - eq_(a.id, 1) + self.assertTrue(hasattr(a, 'from_cache')) + self.assertEqual(a.id, 1) + @unittest.skipUnless(any(['memcache' in c['BACKEND'] for c in settings.CACHES.values()]), 'This test requires that Django use memcache') @mock.patch('memcache.Client.set') def test_infinite_timeout(self, mock_set): """ Test that memcached infinite timeouts work with all Django versions. 
""" - if not any(['memcache' in c['BACKEND'] for c in settings.CACHES.values()]): - raise SkipTest('This test requires that Django use memcache') cache.set('foo', 'bar', timeout=compat.FOREVER) # for memcached, 0 timeout means store forever mock_set.assert_called_with(':1:foo', 'bar', 0) @@ -421,18 +426,18 @@ def test_infinite_timeout(self, mock_set): def test_cache_and_no_cache(self): """Whatever happens last sticks.""" q = Addon.objects.no_cache().cache(12).filter(id=1) - eq_(q.timeout, 12) + self.assertEqual(q.timeout, 12) no_cache = q.no_cache() # The querysets don't share anything. - eq_(q.timeout, 12) - assert no_cache.timeout != 12 + self.assertEqual(q.timeout, 12) + self.assertNotEqual(no_cache.timeout, 12) - assert not hasattr(no_cache.get(), 'from_cache') + self.assertFalse(hasattr(no_cache.get(), 'from_cache')) - eq_(q.get().id, 1) - assert hasattr(q.get(), 'from_cache') + self.assertEqual(q.get().id, 1) + self.assertTrue(hasattr(q.get(), 'from_cache')) @mock.patch('caching.base.cache') def test_cache_machine_timeout(self, cache): @@ -441,46 +446,46 @@ def test_cache_machine_timeout(self, cache): cache.get_many.return_value = {} a = Addon.objects.cache(12).get(id=1) - eq_(a.id, 1) + self.assertEqual(a.id, 1) - assert cache.add.called + self.assertTrue(cache.add.called) args, kwargs = cache.add.call_args - eq_(kwargs, {'timeout': 12}) + self.assertEqual(kwargs, {'timeout': 12}) def test_unicode_key(self): list(User.objects.filter(name='\\xfcmla\\xfct')) def test_empty_in(self): # Raised an exception before fixing #2. - eq_([], list(User.objects.filter(pk__in=[]))) + self.assertEqual([], list(User.objects.filter(pk__in=[]))) def test_empty_in_count(self): # Regression test for #14. 
- eq_(0, User.objects.filter(pk__in=[]).count()) + self.assertEqual(0, User.objects.filter(pk__in=[]).count()) def test_empty_queryset(self): for k in (1, 1): with self.assertNumQueries(k): - eq_(len(Addon.objects.filter(pk=42)), 0) + self.assertEqual(len(Addon.objects.filter(pk=42)), 0) @mock.patch('caching.config.CACHE_EMPTY_QUERYSETS', True) def test_cache_empty_queryset(self): for k in (1, 0): with self.assertNumQueries(k): - eq_(len(Addon.objects.filter(pk=42)), 0) + self.assertEqual(len(Addon.objects.filter(pk=42)), 0) def test_invalidate_empty_queryset(self): u = User.objects.create() - eq_(list(u.addon_set.all()), []) + self.assertEqual(list(u.addon_set.all()), []) Addon.objects.create(val=42, author1=u, author2=u) - eq_([a.val for a in u.addon_set.all()], [42]) + self.assertEqual([a.val for a in u.addon_set.all()], [42]) def test_invalidate_new_related_object(self): u = User.objects.create() Addon.objects.create(val=42, author1=u, author2=u) - eq_([a.val for a in u.addon_set.all()], [42]) + self.assertEqual([a.val for a in u.addon_set.all()], [42]) Addon.objects.create(val=17, author1=u, author2=u) - eq_([a.val for a in u.addon_set.all()], [42, 17]) + self.assertEqual([a.val for a in u.addon_set.all()], [42, 17]) def test_make_key_unicode(self): translation.activate('en-US') @@ -493,7 +498,7 @@ def test_make_key_unicode(self): def test_get_flush_lists_none(self, cache_mock): if not getattr(settings, 'CACHE_MACHINE_USE_REDIS', False): cache_mock.return_value.values.return_value = [None, [1]] - eq_(base.invalidator.get_flush_lists(None), set([1])) + self.assertEqual(base.invalidator.get_flush_lists(None), set([1])) def test_parse_backend_uri(self): """ Test that parse_backend_uri works as intended. Regression for #92. 
""" @@ -506,16 +511,16 @@ def test_parse_backend_uri(self): @mock.patch('caching.config.CACHE_INVALIDATE_ON_CREATE', 'whole-model') def test_invalidate_on_create_enabled(self): """ Test that creating new objects invalidates cached queries for that model. """ - eq_([a.name for a in User.objects.all()], ['fliggy', 'clouseroo']) + self.assertEqual([a.name for a in User.objects.all()], ['fliggy', 'clouseroo']) User.objects.create(name='spam') users = User.objects.all() # our new user should show up and the query should not have come from the cache - eq_([a.name for a in users], ['fliggy', 'clouseroo', 'spam']) - assert not any([u.from_cache for u in users]) + self.assertEqual([a.name for a in users], ['fliggy', 'clouseroo', 'spam']) + self.assertFalse(any([u.from_cache for u in users])) # if we run it again, it should be cached this time users = User.objects.all() - eq_([a.name for a in users], ['fliggy', 'clouseroo', 'spam']) - assert all([u.from_cache for u in User.objects.all()]) + self.assertEqual([a.name for a in users], ['fliggy', 'clouseroo', 'spam']) + self.assertTrue(all([u.from_cache for u in User.objects.all()])) @mock.patch('caching.config.CACHE_INVALIDATE_ON_CREATE', None) def test_invalidate_on_create_disabled(self): @@ -524,10 +529,10 @@ def test_invalidate_on_create_disabled(self): whole-model invalidation on create is disabled. 
""" users = User.objects.all() - assert users, "Can't run this test without some users" - assert not any([u.from_cache for u in users]) + self.assertTrue(users, "Can't run this test without some users") + self.assertFalse(any([u.from_cache for u in users])) User.objects.create(name='spam') - assert all([u.from_cache for u in User.objects.all()]) + self.assertTrue(all([u.from_cache for u in User.objects.all()])) def test_pickle_queryset(self): """ @@ -537,19 +542,19 @@ def test_pickle_queryset(self): # pickled/unpickled on/from different Python processes which may have different # underlying values for DEFAULT_TIMEOUT: q1 = Addon.objects.all() - assert q1.timeout == compat.DEFAULT_TIMEOUT + self.assertEqual(q1.timeout, compat.DEFAULT_TIMEOUT) pickled = pickle.dumps(q1) new_timeout = object() with mock.patch('caching.base.DEFAULT_TIMEOUT', new_timeout): q2 = pickle.loads(pickled) - assert q2.timeout == new_timeout + self.assertEqual(q2.timeout, new_timeout) # Make sure values other than DEFAULT_TIMEOUT remain unaffected: q1 = Addon.objects.cache(10).all() - assert q1.timeout == 10 + self.assertEqual(q1.timeout, 10) pickled = pickle.dumps(q1) with mock.patch('caching.base.DEFAULT_TIMEOUT', new_timeout): q2 = pickle.loads(pickled) - assert q2.timeout == 10 + self.assertEqual(q2.timeout, 10) # use TransactionTestCase so that ['TEST']['MIRROR'] setting works @@ -561,37 +566,37 @@ class MultiDbTestCase(TransactionTestCase): def test_multidb_cache(self): """ Test where master and slave DB result in two different cache keys """ - assert Addon.objects.get(id=1).from_cache is False - assert Addon.objects.get(id=1).from_cache is True + self.assertIs(Addon.objects.get(id=1).from_cache, False) + self.assertIs(Addon.objects.get(id=1).from_cache, True) from_slave = Addon.objects.using('slave').get(id=1) - assert from_slave.from_cache is False - assert from_slave._state.db == 'slave' + self.assertIs(from_slave.from_cache, False) + self.assertEqual(from_slave._state.db, 'slave') def 
test_multidb_fetch_by_id(self): """ Test where master and slave DB result in two different cache keys with FETCH_BY_ID""" with self.settings(FETCH_BY_ID=True): - assert Addon.objects.get(id=1).from_cache is False - assert Addon.objects.get(id=1).from_cache is True + self.assertIs(Addon.objects.get(id=1).from_cache, False) + self.assertIs(Addon.objects.get(id=1).from_cache, True) from_slave = Addon.objects.using('slave').get(id=1) - assert from_slave.from_cache is False - assert from_slave._state.db == 'slave' + self.assertIs(from_slave.from_cache, False) + self.assertEqual(from_slave._state.db, 'slave') def test_multidb_master_slave_invalidation(self): """ Test saving an object on one DB invalidates it for all DBs """ log.debug('priming the DB & cache') master_obj = User.objects.using('default').create(name='new-test-user') slave_obj = User.objects.using('slave').get(name='new-test-user') - assert slave_obj.from_cache is False + self.assertIs(slave_obj.from_cache, False) log.debug('deleting the original object') User.objects.using('default').filter(pk=slave_obj.pk).delete() log.debug('re-creating record with a new primary key') master_obj = User.objects.using('default').create(name='new-test-user') log.debug('attempting to force re-fetch from DB (should not use cache)') slave_obj = User.objects.using('slave').get(name='new-test-user') - assert slave_obj.from_cache is False - eq_(slave_obj.pk, master_obj.pk) + self.assertIs(slave_obj.from_cache, False) + self.assertEqual(slave_obj.pk, master_obj.pk) def test_multidb_no_db_crossover(self): """ Test no crossover of objects with identical PKs """ @@ -599,13 +604,13 @@ def test_multidb_no_db_crossover(self): master_obj2 = User.objects.using('master2').create(pk=master_obj.pk, name='other-test-user') # prime the cache for the default DB master_obj = User.objects.using('default').get(name='new-test-user') - assert master_obj.from_cache is False + self.assertIs(master_obj.from_cache, False) master_obj = 
User.objects.using('default').get(name='new-test-user') - assert master_obj.from_cache is True + self.assertIs(master_obj.from_cache, True) # prime the cache for the 2nd master DB master_obj2 = User.objects.using('master2').get(name='other-test-user') - assert master_obj2.from_cache is False + self.assertIs(master_obj2.from_cache, False) master_obj2 = User.objects.using('master2').get(name='other-test-user') - assert master_obj2.from_cache is True + self.assertIs(master_obj2.from_cache, True) # ensure no crossover between databases - assert master_obj.name != master_obj2.name + self.assertNotEqual(master_obj.name, master_obj2.name) From 1bf0455b2ab6d2694a4b1c7d8738eecb85010bdc Mon Sep 17 00:00:00 2001 From: Tim Dawborn Date: Wed, 10 Feb 2016 21:42:14 +1100 Subject: [PATCH 152/214] Fix flake8 error. --- tests/test_cache.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tests/test_cache.py b/tests/test_cache.py index 38ba4d8..ba59173 100644 --- a/tests/test_cache.py +++ b/tests/test_cache.py @@ -413,7 +413,9 @@ def test_timeout_from_queryset(self): self.assertTrue(hasattr(a, 'from_cache')) self.assertEqual(a.id, 1) - @unittest.skipUnless(any(['memcache' in c['BACKEND'] for c in settings.CACHES.values()]), 'This test requires that Django use memcache') + @unittest.skipUnless( + any(['memcache' in c['BACKEND'] for c in settings.CACHES.values()]), + 'This test requires that Django use memcache') @mock.patch('memcache.Client.set') def test_infinite_timeout(self, mock_set): """ From b94ec75b0a8c858274009b6f0c67fe236a39509c Mon Sep 17 00:00:00 2001 From: Tim Dawborn Date: Wed, 10 Feb 2016 22:14:21 +1100 Subject: [PATCH 153/214] Ignore newer flake8 checks so that travis builds correctly. 
--- .travis.yml | 2 +- caching/invalidation.py | 2 +- tests/test_cache.py | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.travis.yml b/.travis.yml index 1282ff8..fbf5ab1 100644 --- a/.travis.yml +++ b/.travis.yml @@ -22,7 +22,7 @@ install: - pip install coveralls script: - python run_tests.py --with-coverage - - flake8 . + - flake8 --ignore=E731,E402 . after_success: - coveralls env: diff --git a/caching/invalidation.py b/caching/invalidation.py index ee3aa28..6c75000 100644 --- a/caching/invalidation.py +++ b/caching/invalidation.py @@ -226,7 +226,7 @@ def parse_backend_uri(backend_uri): host = rest qpos = rest.find('?') if qpos != -1: - params = dict(parse_qsl(rest[qpos+1:])) + params = dict(parse_qsl(rest[qpos + 1:])) host = rest[:qpos] else: params = {} diff --git a/tests/test_cache.py b/tests/test_cache.py index ba59173..b3d64e9 100644 --- a/tests/test_cache.py +++ b/tests/test_cache.py @@ -414,8 +414,8 @@ def test_timeout_from_queryset(self): self.assertEqual(a.id, 1) @unittest.skipUnless( - any(['memcache' in c['BACKEND'] for c in settings.CACHES.values()]), - 'This test requires that Django use memcache') + any(['memcache' in c['BACKEND'] for c in settings.CACHES.values()]), + 'This test requires that Django use memcache') @mock.patch('memcache.Client.set') def test_infinite_timeout(self, mock_set): """ From bfca2e24243c01262049ff63cbd69751e53fcc48 Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Fri, 3 Jun 2016 15:32:10 -0700 Subject: [PATCH 154/214] add newer django+python to tox+travis --- .travis.yml | 16 ++++++++++++++++ tox.ini | 22 ++++++++++++++++++---- 2 files changed, 34 insertions(+), 4 deletions(-) diff --git a/.travis.yml b/.travis.yml index 1282ff8..d00bf3a 100644 --- a/.travis.yml +++ b/.travis.yml @@ -10,6 +10,7 @@ python: - "2.7" - "3.3" - "3.4" + - "3.5" addons: postgresql: "9.4" before_script: @@ -31,15 +32,30 @@ env: - DJANGO_SPEC="Django>=1.6,<1.7" - DJANGO_SPEC="Django>=1.7,<1.8" - 
DJANGO_SPEC="Django>=1.8,<1.9" + - DJANGO_SPEC="Django==1.9b1" matrix: exclude: - python: "2.6" env: DJANGO_SPEC="Django>=1.7,<1.8" - python: "2.6" env: DJANGO_SPEC="Django>=1.8,<1.9" + - python: "2.6" + env: DJANGO_SPEC="Django==1.9b1" - python: "3.3" env: DJANGO_SPEC="Django>=1.4,<1.5" + - python: "3.3" + env: DJANGO_SPEC="Django1.9b1" - python: "3.4" env: DJANGO_SPEC="Django>=1.4,<1.5" + - python: "3.5" + env: DJANGO_SPEC="Django>=1.4,<1.5" + - python: "3.5" + env: DJANGO_SPEC="Django>=1.5,<1.6" + - python: "3.5" + env: DJANGO_SPEC="Django>=1.6,<1.7" + - python: "3.5" + env: DJANGO_SPEC="Django>=1.7,<1.8" + - python: "3.5" + env: DJANGO_SPEC="Django>=1.8,<1.9" # Adding sudo: False tells Travis to use their container-based infrastructure, which is somewhat faster. sudo: False diff --git a/tox.ini b/tox.ini index d82f261..6e76039 100644 --- a/tox.ini +++ b/tox.ini @@ -5,22 +5,36 @@ [tox] envlist = - py26-dj{14,15,16} - py27-dj{14,15,16,17,18} - py{33,34}-dj{15,16,17,18} + dj14-py{26,27} + dj{15,16}-py{26,27,33,34} + dj{17,18}-py{27,33,34} + dj19-py{27,34,35} + dj110-py{27,34,35} +# py26-dj{14,15,16} +# py27-dj{14,15,16,17,18} +# py{33,34}-dj{15,16,17,18} +# py{27,34,35}-dj19 py{27,34}-flake8 docs +[testenv:py33] +basepython = if [ -x $(which pythonz) ]; then pythonz locate 3.3.6; else which python3.3; fi + +[testenv:py35] +basepython = if [ -x $(which pythonz) ]; then pythonz locate 3.5.1; else which python3.5; fi + [testenv] commands = {envpython} run_tests.py deps = py{26,27}: -rrequirements/py2.txt - py{33,34}: -rrequirements/py3.txt + py{33,34,35}: -rrequirements/py3.txt dj14: Django>=1.4,<1.5 dj15: Django>=1.5,<1.6 dj16: Django>=1.6,<1.7 dj17: Django>=1.7,<1.8 dj18: Django>=1.8,<1.9 + dj19: Django>=1.9,<1.10 + dj110: Django>=1.10a1,<1.11 [testenv:docs] basepython = python2.7 From 004261f4c0f53a124a41f40ef8ee884ca8e22648 Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Fri, 3 Jun 2016 15:47:10 -0700 Subject: [PATCH 155/214] convert assigned lambda funcs to 
def statements --- run_tests.py | 2 +- tests/test_cache.py | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/run_tests.py b/run_tests.py index 6bea3fb..5cdbb26 100644 --- a/run_tests.py +++ b/run_tests.py @@ -15,7 +15,7 @@ # Python 2.6 doesn't have check_output. Note this will not raise a CalledProcessError # like check_output does, but it should work for our purposes. import subprocess - check_output = lambda x: subprocess.Popen(x, stdout=subprocess.PIPE).communicate()[0] + def check_output(x): return subprocess.Popen(x, stdout=subprocess.PIPE).communicate()[0] NAME = os.path.basename(os.path.dirname(__file__)) ROOT = os.path.abspath(os.path.dirname(__file__)) diff --git a/tests/test_cache.py b/tests/test_cache.py index 408cd58..953e717 100644 --- a/tests/test_cache.py +++ b/tests/test_cache.py @@ -308,7 +308,7 @@ def expensive(): return counter.call_count a = Addon.objects.get(id=1) - f = lambda: base.cached_with(a, expensive, 'key') + def f(): return base.cached_with(a, expensive, 'key') # Only gets called once. eq_(f(), 1) @@ -328,7 +328,7 @@ def expensive(): counter.reset_mock() q = Addon.objects.filter(id=1) - f = lambda: base.cached_with(q, expensive, 'key') + def f(): return base.cached_with(q, expensive, 'key') # Only gets called once. 
eq_(f(), 1) @@ -355,7 +355,7 @@ def test_cached_with_unicode(self): obj = mock.Mock() obj.query_key.return_value = 'xxx' obj.flush_key.return_value = 'key' - f = lambda: 1 + def f(): return 1 eq_(base.cached_with(obj, f, 'adf:%s' % u), 1) def test_cached_method(self): From 23b28c62fc8a123e0d65f1b37a5ce61538d945fe Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Fri, 3 Jun 2016 15:52:41 -0700 Subject: [PATCH 156/214] fix flake8 errors --- caching/base.py | 2 +- tests/test_cache.py | 24 +++++++++++++----------- tests/testapp/models.py | 3 +-- 3 files changed, 15 insertions(+), 14 deletions(-) diff --git a/caching/base.py b/caching/base.py index 9e0e324..0e7fd8f 100644 --- a/caching/base.py +++ b/caching/base.py @@ -388,7 +388,7 @@ def __init__(self, obj, func): self.cache = {} def __call__(self, *args, **kwargs): - k = lambda o: o.cache_key if hasattr(o, 'cache_key') else o + def k(o): return o.cache_key if hasattr(o, 'cache_key') else o arg_keys = list(map(k, args)) kwarg_keys = [(key, k(val)) for key, val in list(kwargs.items())] key_parts = ('m', self.obj.cache_key, self.func.__name__, diff --git a/tests/test_cache.py b/tests/test_cache.py index 69832ee..5a86588 100644 --- a/tests/test_cache.py +++ b/tests/test_cache.py @@ -1,29 +1,28 @@ from __future__ import unicode_literals +import django +import jinja2 import logging import pickle import sys +from django.conf import settings +from django.test import TestCase, TransactionTestCase +from django.utils import translation, encoding + +from caching import base, invalidation, config, compat + +from .testapp.models import Addon, User + if sys.version_info < (2, 7): import unittest2 as unittest else: import unittest -import django -from django.conf import settings -from django.test import TestCase, TransactionTestCase -from django.utils import translation, encoding - if sys.version_info >= (3, ): from unittest import mock else: import mock -import jinja2 - -from caching import base, invalidation, config, compat - 
-from .testapp.models import Addon, User - cache = invalidation.cache log = logging.getLogger(__name__) @@ -314,6 +313,7 @@ def expensive(): return counter.call_count a = Addon.objects.get(id=1) + def f(): return base.cached_with(a, expensive, 'key') # Only gets called once. @@ -334,6 +334,7 @@ def f(): return base.cached_with(a, expensive, 'key') counter.reset_mock() q = Addon.objects.filter(id=1) + def f(): return base.cached_with(q, expensive, 'key') # Only gets called once. @@ -361,6 +362,7 @@ def test_cached_with_unicode(self): obj = mock.Mock() obj.query_key.return_value = 'xxx' obj.flush_key.return_value = 'key' + def f(): return 1 self.assertEqual(base.cached_with(obj, f, 'adf:%s' % u), 1) diff --git a/tests/testapp/models.py b/tests/testapp/models.py index 37fbdc6..1df219d 100644 --- a/tests/testapp/models.py +++ b/tests/testapp/models.py @@ -2,14 +2,13 @@ from django.db import models from django.utils import six +from caching.base import CachingMixin, CachingManager, cached_method if six.PY3: from unittest import mock else: import mock -from caching.base import CachingMixin, CachingManager, cached_method - # This global call counter will be shared among all instances of an Addon. call_counter = mock.Mock() From f12a36c055f009a1d303599fdc8a4e4cce32c3b4 Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Fri, 3 Jun 2016 16:25:27 -0700 Subject: [PATCH 157/214] make travis use tox --- .travis.yml | 61 ++++++++++++++++++++++++----------------------------- tox.ini | 16 ++++++-------- 2 files changed, 35 insertions(+), 42 deletions(-) diff --git a/.travis.yml b/.travis.yml index fd828a2..2d92858 100644 --- a/.travis.yml +++ b/.travis.yml @@ -19,43 +19,38 @@ before_script: install: - pip install -U pip # make sure we have the latest version - pip install -e . - - pip install -r requirements/py`echo $TRAVIS_PYTHON_VERSION|cut -d'.' 
-f1`.txt "$DJANGO_SPEC" + - pip install tox - pip install coveralls script: - - python run_tests.py --with-coverage - - flake8 --ignore=E731,E402 . + - tox -e $TOX_ENV + - flake8 after_success: - coveralls env: - - DJANGO_SPEC="Django>=1.4,<1.5" - - DJANGO_SPEC="Django>=1.5,<1.6" - - DJANGO_SPEC="Django>=1.6,<1.7" - - DJANGO_SPEC="Django>=1.7,<1.8" - - DJANGO_SPEC="Django>=1.8,<1.9" - - DJANGO_SPEC="Django==1.9b1" -matrix: - exclude: - - python: "2.6" - env: DJANGO_SPEC="Django>=1.7,<1.8" - - python: "2.6" - env: DJANGO_SPEC="Django>=1.8,<1.9" - - python: "2.6" - env: DJANGO_SPEC="Django==1.9b1" - - python: "3.3" - env: DJANGO_SPEC="Django>=1.4,<1.5" - - python: "3.3" - env: DJANGO_SPEC="Django1.9b1" - - python: "3.4" - env: DJANGO_SPEC="Django>=1.4,<1.5" - - python: "3.5" - env: DJANGO_SPEC="Django>=1.4,<1.5" - - python: "3.5" - env: DJANGO_SPEC="Django>=1.5,<1.6" - - python: "3.5" - env: DJANGO_SPEC="Django>=1.6,<1.7" - - python: "3.5" - env: DJANGO_SPEC="Django>=1.7,<1.8" - - python: "3.5" - env: DJANGO_SPEC="Django>=1.8,<1.9" + - TOX_ENV="dj14-py26" + - TOX_ENV="dj14-py27" + - TOX_ENV="dj15-py26" + - TOX_ENV="dj15-py27" + - TOX_ENV="dj15-py33" + - TOX_ENV="dj15-py34" + - TOX_ENV="dj16-py26" + - TOX_ENV="dj16-py27" + - TOX_ENV="dj16-py33" + - TOX_ENV="dj16-py34" + - TOX_ENV="dj17-py27" + - TOX_ENV="dj17-py33" + - TOX_ENV="dj17-py34" + - TOX_ENV="dj18-py27" + - TOX_ENV="dj18-py33" + - TOX_ENV="dj18-py34" + - TOX_ENV="dj19-py27" + - TOX_ENV="dj19-py34" + - TOX_ENV="dj19-py35" + - TOX_ENV="dj110-py27" + - TOX_ENV="dj110-py34" + - TOX_ENV="dj110-py35" + - TOX_ENV="py27-flake8" + - TOX_ENV="py35-flake8" + - TOX_ENV="docs" # Adding sudo: False tells Travis to use their container-based infrastructure, which is somewhat faster. 
sudo: False diff --git a/tox.ini b/tox.ini index 6e76039..83b85ae 100644 --- a/tox.ini +++ b/tox.ini @@ -8,20 +8,18 @@ envlist = dj14-py{26,27} dj{15,16}-py{26,27,33,34} dj{17,18}-py{27,33,34} - dj19-py{27,34,35} - dj110-py{27,34,35} -# py26-dj{14,15,16} -# py27-dj{14,15,16,17,18} -# py{33,34}-dj{15,16,17,18} -# py{27,34,35}-dj19 - py{27,34}-flake8 + dj{19,110}-py{27,34,35} + py{27,35}-flake8 docs [testenv:py33] -basepython = if [ -x $(which pythonz) ]; then pythonz locate 3.3.6; else which python3.3; fi +basepython = "if [ -x $(which pythonz) ]; then pythonz locate 3.3.6; else which python3.3; fi" + +[testenv:py34] +basepython = "if [ -x $(which pythonz) ]; then pythonz locate 3.4.3; else which python3.4; fi" [testenv:py35] -basepython = if [ -x $(which pythonz) ]; then pythonz locate 3.5.1; else which python3.5; fi +basepython = "if [ -x $(which pythonz) ]; then pythonz locate 3.5.1; else which python3.5; fi" [testenv] commands = {envpython} run_tests.py From d6c527e5fed078765064eb345a039135423a05fd Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Fri, 3 Jun 2016 16:30:15 -0700 Subject: [PATCH 158/214] specify only one python since tox does that now --- .travis.yml | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/.travis.yml b/.travis.yml index 2d92858..edc93d5 100644 --- a/.travis.yml +++ b/.travis.yml @@ -2,14 +2,8 @@ language: python services: - memcached - redis-server -# Use Travis' build matrix and exclude functions rather than running tox -# directly so that we can run the builds in parallel and get coverage reports -# for each Python/Django version combo python: - - "2.6" - - "2.7" - - "3.3" - - "3.4" +# python selected by tox, so specify only one version here - "3.5" addons: postgresql: "9.4" From aed2defd22884e009cd63723aca03344eb999d04 Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Fri, 3 Jun 2016 16:42:47 -0700 Subject: [PATCH 159/214] pass TRAVIS environment variable into tox environment --- tox.ini | 1 + 1 file 
changed, 1 insertion(+) diff --git a/tox.ini b/tox.ini index 83b85ae..47d3c1f 100644 --- a/tox.ini +++ b/tox.ini @@ -23,6 +23,7 @@ basepython = "if [ -x $(which pythonz) ]; then pythonz locate 3.5.1; else which [testenv] commands = {envpython} run_tests.py +passenv = TRAVIS deps = py{26,27}: -rrequirements/py2.txt py{33,34,35}: -rrequirements/py3.txt From 615585bca9f8c03d0a48ab98bb8f889b06993a1d Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Fri, 3 Jun 2016 17:07:12 -0700 Subject: [PATCH 160/214] don't run flake8 as part of travis, use tox --- .travis.yml | 1 - tox.ini | 4 ++-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/.travis.yml b/.travis.yml index edc93d5..41323bf 100644 --- a/.travis.yml +++ b/.travis.yml @@ -17,7 +17,6 @@ install: - pip install coveralls script: - tox -e $TOX_ENV - - flake8 after_success: - coveralls env: diff --git a/tox.ini b/tox.ini index 47d3c1f..a3b524d 100644 --- a/tox.ini +++ b/tox.ini @@ -51,7 +51,7 @@ basepython = python2.7 deps = flake8 commands = flake8 -[testenv:py34-flake8] -basepython = python3.4 +[testenv:py35-flake8] +basepython = python3.5 deps = flake8 commands = flake8 From dcc81ab73ff08d3a823d4488845a5c43228f5c16 Mon Sep 17 00:00:00 2001 From: Vinod Kurup Date: Fri, 6 Oct 2017 21:45:31 -0400 Subject: [PATCH 161/214] Update tox and travis --- .travis.yml | 33 +++++++-------------------------- docs/conf.py | 3 ++- run_tests.py | 10 +--------- tox.ini | 38 +++++++++++++------------------------- 4 files changed, 23 insertions(+), 61 deletions(-) diff --git a/.travis.yml b/.travis.yml index 41323bf..f31fff0 100644 --- a/.travis.yml +++ b/.travis.yml @@ -4,9 +4,9 @@ services: - redis-server python: # python selected by tox, so specify only one version here - - "3.5" + - "3.6" addons: - postgresql: "9.4" + postgresql: "9.5" before_script: - psql -c 'create database travis_ci_test;' -U postgres - psql -c 'create database travis_ci_test2;' -U postgres @@ -20,30 +20,11 @@ script: after_success: - coveralls 
env: - - TOX_ENV="dj14-py26" - - TOX_ENV="dj14-py27" - - TOX_ENV="dj15-py26" - - TOX_ENV="dj15-py27" - - TOX_ENV="dj15-py33" - - TOX_ENV="dj15-py34" - - TOX_ENV="dj16-py26" - - TOX_ENV="dj16-py27" - - TOX_ENV="dj16-py33" - - TOX_ENV="dj16-py34" - - TOX_ENV="dj17-py27" - - TOX_ENV="dj17-py33" - - TOX_ENV="dj17-py34" - - TOX_ENV="dj18-py27" - - TOX_ENV="dj18-py33" - - TOX_ENV="dj18-py34" - - TOX_ENV="dj19-py27" - - TOX_ENV="dj19-py34" - - TOX_ENV="dj19-py35" - - TOX_ENV="dj110-py27" - - TOX_ENV="dj110-py34" - - TOX_ENV="dj110-py35" - - TOX_ENV="py27-flake8" - - TOX_ENV="py35-flake8" + - TOX_ENV="dj18-py27,dj18-py34,dj18-py35" + # - TOX_ENV="dj19-py27,dj19-py34,dj19-py35,dj19-py36" + # - TOX_ENV="dj110-py27,dj110-py34,dj110-py35,dj110-py36" + # - TOX_ENV="dj111-py27,dj111-py34,dj111-py35,dj111-py36" + # - TOX_ENV="py27-flake8,py36-flake8" - TOX_ENV="docs" # Adding sudo: False tells Travis to use their container-based infrastructure, which is somewhat faster. sudo: False diff --git a/docs/conf.py b/docs/conf.py index 71f3df3..3341ac5 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,9 +1,10 @@ import os import sys +import caching + sys.path.append(os.path.abspath('..')) -import caching # The suffix of source filenames. source_suffix = '.rst' diff --git a/run_tests.py b/run_tests.py index 5cdbb26..d33f08f 100644 --- a/run_tests.py +++ b/run_tests.py @@ -7,15 +7,7 @@ import os import sys import argparse - -from subprocess import call -try: - from subprocess import check_output -except ImportError: - # Python 2.6 doesn't have check_output. Note this will not raise a CalledProcessError - # like check_output does, but it should work for our purposes. 
- import subprocess - def check_output(x): return subprocess.Popen(x, stdout=subprocess.PIPE).communicate()[0] +from subprocess import call, check_output NAME = os.path.basename(os.path.dirname(__file__)) ROOT = os.path.abspath(os.path.dirname(__file__)) diff --git a/tox.ini b/tox.ini index a3b524d..0035638 100644 --- a/tox.ini +++ b/tox.ini @@ -5,38 +5,28 @@ [tox] envlist = - dj14-py{26,27} - dj{15,16}-py{26,27,33,34} - dj{17,18}-py{27,33,34} - dj{19,110}-py{27,34,35} - py{27,35}-flake8 + dj{18}-py{27,34,35} + dj{19,110,111}-py{27,34,35,36} + py{27,36}-flake8 docs -[testenv:py33] -basepython = "if [ -x $(which pythonz) ]; then pythonz locate 3.3.6; else which python3.3; fi" - -[testenv:py34] -basepython = "if [ -x $(which pythonz) ]; then pythonz locate 3.4.3; else which python3.4; fi" - -[testenv:py35] -basepython = "if [ -x $(which pythonz) ]; then pythonz locate 3.5.1; else which python3.5; fi" - [testenv] +basepython = + py27: python2.7 + py34: python3.4 + py35: python3.5 + py36: python3.6 commands = {envpython} run_tests.py -passenv = TRAVIS deps = py{26,27}: -rrequirements/py2.txt - py{33,34,35}: -rrequirements/py3.txt - dj14: Django>=1.4,<1.5 - dj15: Django>=1.5,<1.6 - dj16: Django>=1.6,<1.7 - dj17: Django>=1.7,<1.8 + py{34,35,36}: -rrequirements/py3.txt dj18: Django>=1.8,<1.9 dj19: Django>=1.9,<1.10 - dj110: Django>=1.10a1,<1.11 + dj110: Django>=1.10,<1.11 + dj111: Django>=1.11,<2.0 [testenv:docs] -basepython = python2.7 +basepython = python3.6 deps = Sphinx Django @@ -47,11 +37,9 @@ changedir = docs commands = /usr/bin/make html [testenv:py27-flake8] -basepython = python2.7 deps = flake8 commands = flake8 -[testenv:py35-flake8] -basepython = python3.5 +[testenv:py36-flake8] deps = flake8 commands = flake8 From 26680940e4bace73121fd8e98ba3ada5ce14e28a Mon Sep 17 00:00:00 2001 From: Vinod Kurup Date: Fri, 6 Oct 2017 21:48:53 -0400 Subject: [PATCH 162/214] Include flake8 on travis --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff 
--git a/.travis.yml b/.travis.yml index f31fff0..898ba66 100644 --- a/.travis.yml +++ b/.travis.yml @@ -24,7 +24,7 @@ env: # - TOX_ENV="dj19-py27,dj19-py34,dj19-py35,dj19-py36" # - TOX_ENV="dj110-py27,dj110-py34,dj110-py35,dj110-py36" # - TOX_ENV="dj111-py27,dj111-py34,dj111-py35,dj111-py36" - # - TOX_ENV="py27-flake8,py36-flake8" + - TOX_ENV="py27-flake8,py36-flake8" - TOX_ENV="docs" # Adding sudo: False tells Travis to use their container-based infrastructure, which is somewhat faster. sudo: False From d450cffb6f4ecf18fdd9e80b548e7b3a3f43dbed Mon Sep 17 00:00:00 2001 From: Vinod Kurup Date: Fri, 6 Oct 2017 22:13:04 -0400 Subject: [PATCH 163/214] Workaround Travis bug --- .travis.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.travis.yml b/.travis.yml index 898ba66..03dca3a 100644 --- a/.travis.yml +++ b/.travis.yml @@ -7,6 +7,9 @@ python: - "3.6" addons: postgresql: "9.5" +before_install: + # work around https://github.com/travis-ci/travis-ci/issues/8363 + - pyenv global system 3.5 before_script: - psql -c 'create database travis_ci_test;' -U postgres - psql -c 'create database travis_ci_test2;' -U postgres From 77d21e862f46a392baedb87acde7ffc7d3d73d49 Mon Sep 17 00:00:00 2001 From: Vinod Kurup Date: Fri, 6 Oct 2017 22:29:20 -0400 Subject: [PATCH 164/214] Try to get coverage working --- tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index 0035638..56232f4 100644 --- a/tox.ini +++ b/tox.ini @@ -16,7 +16,7 @@ basepython = py34: python3.4 py35: python3.5 py36: python3.6 -commands = {envpython} run_tests.py +commands = {envpython} run_tests.py --with-coverage deps = py{26,27}: -rrequirements/py2.txt py{34,35,36}: -rrequirements/py3.txt From 79281be526d333f71839c1156d04f6dc192bf347 Mon Sep 17 00:00:00 2001 From: Vinod Kurup Date: Fri, 6 Oct 2017 23:04:42 -0400 Subject: [PATCH 165/214] Remove support for Python < 2.7, Django < 1.8 --- README.rst | 6 +-- caching/backends/__init__.py | 0 caching/backends/locmem.py | 
43 ----------------- caching/backends/memcached.py | 32 ------------- caching/base.py | 6 +-- caching/compat.py | 14 ------ caching/invalidation.py | 13 ++---- docs/index.rst | 56 ++--------------------- examples/cache_machine/custom_backend.py | 2 +- examples/cache_machine/locmem_settings.py | 2 +- examples/cache_machine/settings.py | 2 +- requirements/base.txt | 1 - setup.py | 4 +- tests/test_cache.py | 29 ++---------- 14 files changed, 19 insertions(+), 191 deletions(-) delete mode 100644 caching/backends/__init__.py delete mode 100644 caching/backends/locmem.py delete mode 100644 caching/backends/memcached.py delete mode 100644 caching/compat.py diff --git a/README.rst b/README.rst index e78b4f8..8a61283 100644 --- a/README.rst +++ b/README.rst @@ -17,7 +17,7 @@ For full docs, see https://cache-machine.readthedocs.org/en/latest/. Requirements ------------ -Cache Machine works with Django 1.4-1.8 and Python 2.6, 2.7, 3.3 and 3.4. +Cache Machine works with Django 1.8 and Python 2.7, 3.4, 3.5 and 3.6. 
Installation @@ -27,10 +27,6 @@ Get it from `pypi `_:: pip install django-cache-machine -or `github `_:: - - pip install -e git://github.com/django-cache-machine/django-cache-machine.git#egg=django-cache-machine - Running Tests ------------- diff --git a/caching/backends/__init__.py b/caching/backends/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/caching/backends/locmem.py b/caching/backends/locmem.py deleted file mode 100644 index 5276c93..0000000 --- a/caching/backends/locmem.py +++ /dev/null @@ -1,43 +0,0 @@ -from __future__ import unicode_literals - -import django -from django.core.cache.backends import locmem - -from caching.compat import DEFAULT_TIMEOUT, FOREVER - - -if django.VERSION[:2] >= (1, 6): - Infinity = FOREVER -else: - class _Infinity(object): - """Always compares greater than numbers.""" - - def __radd__(self, _): - return self - - def __cmp__(self, o): - return 0 if self is o else 1 - - def __repr__(self): - return 'Infinity' - - Infinity = _Infinity() - del _Infinity - - -# Add infinite timeout support to the locmem backend. Useful for testing. -class InfinityMixin(object): - - def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None): - if timeout == FOREVER: - timeout = Infinity - return super(InfinityMixin, self).add(key, value, timeout, version) - - def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None): - if timeout == FOREVER: - timeout = Infinity - return super(InfinityMixin, self).set(key, value, timeout, version) - - -class LocMemCache(InfinityMixin, locmem.LocMemCache): - pass diff --git a/caching/backends/memcached.py b/caching/backends/memcached.py deleted file mode 100644 index f08f9bf..0000000 --- a/caching/backends/memcached.py +++ /dev/null @@ -1,32 +0,0 @@ -from __future__ import unicode_literals - -import django -from django.core.cache.backends import memcached - -from caching.compat import DEFAULT_TIMEOUT - - -# Add infinite timeout support to the memcached backend, if needed. 
-class InfinityMixin(object): - - if django.VERSION[:2] < (1, 6): - # Django 1.6 and later do it the right way already - def _get_memcache_timeout(self, timeout): - if timeout == 0: - return timeout - else: - return super(InfinityMixin, self)._get_memcache_timeout(timeout) - - def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None): - return super(InfinityMixin, self).add(key, value, timeout, version) - - def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None): - return super(InfinityMixin, self).set(key, value, timeout, version) - - -class MemcachedCache(InfinityMixin, memcached.MemcachedCache): - pass - - -class PyLibMCCache(InfinityMixin, memcached.PyLibMCCache): - pass diff --git a/caching/base.py b/caching/base.py index 0e7fd8f..57853dc 100644 --- a/caching/base.py +++ b/caching/base.py @@ -3,14 +3,13 @@ import functools import logging -import django +from django.core.cache.backends.base import DEFAULT_TIMEOUT from django.db import models from django.db.models import signals from django.db.models.sql import query, EmptyResultSet from django.utils import encoding from caching import config -from .compat import DEFAULT_TIMEOUT from .invalidation import invalidator, flush_key, make_key, byid, cache @@ -32,9 +31,6 @@ class CachingManager(models.Manager): def get_queryset(self): return CachingQuerySet(self.model, using=self._db) - if django.VERSION < (1, 6): - get_query_set = get_queryset - def contribute_to_class(self, cls, name): signals.post_save.connect(self.post_save, sender=cls) signals.post_delete.connect(self.post_delete, sender=cls) diff --git a/caching/compat.py b/caching/compat.py deleted file mode 100644 index cd1731a..0000000 --- a/caching/compat.py +++ /dev/null @@ -1,14 +0,0 @@ -from __future__ import unicode_literals - -import django - -__all__ = ['DEFAULT_TIMEOUT', 'FOREVER'] - - -if django.VERSION[:2] >= (1, 6): - from django.core.cache.backends.base import DEFAULT_TIMEOUT as DJANGO_DEFAULT_TIMEOUT - DEFAULT_TIMEOUT = 
DJANGO_DEFAULT_TIMEOUT - FOREVER = None -else: - DEFAULT_TIMEOUT = None - FOREVER = 0 diff --git a/caching/invalidation.py b/caching/invalidation.py index 6c75000..360c80e 100644 --- a/caching/invalidation.py +++ b/caching/invalidation.py @@ -6,13 +6,15 @@ import logging import socket -import django from django.conf import settings from django.core.cache import cache as default_cache +from django.core.cache import caches from django.core.cache.backends.base import InvalidCacheBackendError from django.utils import encoding, translation, six from django.utils.six.moves.urllib.parse import parse_qsl +from caching import config + try: import redis as redislib except ImportError: @@ -20,17 +22,10 @@ # Look for an own cache first before falling back to the default cache try: - if django.VERSION[:2] >= (1, 7): - from django.core.cache import caches - cache = caches['cache_machine'] - else: - from django.core.cache import get_cache - cache = get_cache('cache_machine') + cache = caches['cache_machine'] except (InvalidCacheBackendError, ValueError): cache = default_cache -from caching import config - log = logging.getLogger('caching.invalidation') diff --git a/docs/index.rst b/docs/index.rst index 28eb041..aff85a7 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -14,59 +14,9 @@ affect you, please see the :ref:`release-notes`. Settings -------- -Before we start, you'll have to update your ``settings.py`` to use one of the -caching backends provided by Cache Machine. Prior to Django 1.6, Django's -built-in caching backends did not allow for infinite cache timeouts, -which are critical for doing invalidation (see below). Cache Machine extends -the ``locmem`` and ``memcached`` backends provided by Django to enable -indefinite caching when a timeout of ``caching.base.FOREVER`` is -passed. If you were already using one of these backends, you can probably go -on using them just as you were. 
- -With Django 1.4 or higher, you should use the ``CACHES`` setting:: - - CACHES = { - 'default': { - 'BACKEND': 'caching.backends.memcached.MemcachedCache', - 'LOCATION': [ - 'server-1:11211', - 'server-2:11211', - ], - 'KEY_PREFIX': 'weee:', - }, - } - -Note that we have to specify the class, not the module, for the ``BACKEND`` -property, and that the ``KEY_PREFIX`` is optional. The ``LOCATION`` may be a -string, instead of a list, if you only have one server. - -If you require the default cache backend to be a different type of -cache backend or want Cache Machine to use specific cache server -options simply define a separate ``cache_machine`` entry for the -``CACHES`` setting, e.g.:: - - CACHES = { - 'default': { - 'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache', - 'LOCATION': 'server-1:11211', - }, - 'cache_machine': { - 'BACKEND': 'caching.backends.memcached.MemcachedCache', - 'LOCATION': [ - 'server-1:11211', - 'server-2:11211', - ], - 'KEY_PREFIX': 'weee:', - }, - } - -.. note:: - - Cache Machine also supports the other memcache backend support by - Django >= 1.4 based on pylibmc_: - ``caching.backends.memcached.PyLibMCCache``. - -.. _pylibmc: http://sendapatch.se/projects/pylibmc/ +Older versions of Cache Machine required you to use customized cache backends. As of Django 1.6, +these are no longer needed and they have been removed from Cache Machine. Use the standard Django +cache backends. 
COUNT queries ^^^^^^^^^^^^^ diff --git a/examples/cache_machine/custom_backend.py b/examples/cache_machine/custom_backend.py index 53e2789..c9030d0 100644 --- a/examples/cache_machine/custom_backend.py +++ b/examples/cache_machine/custom_backend.py @@ -5,7 +5,7 @@ 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', }, 'cache_machine': { - 'BACKEND': 'caching.backends.memcached.MemcachedCache', + 'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache', 'LOCATION': 'localhost:11211', }, } diff --git a/examples/cache_machine/locmem_settings.py b/examples/cache_machine/locmem_settings.py index bfe4c62..8ed4fba 100644 --- a/examples/cache_machine/locmem_settings.py +++ b/examples/cache_machine/locmem_settings.py @@ -2,6 +2,6 @@ CACHES = { 'default': { - 'BACKEND': 'caching.backends.locmem.LocMemCache', + 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', }, } diff --git a/examples/cache_machine/settings.py b/examples/cache_machine/settings.py index 4cba276..2b6027f 100644 --- a/examples/cache_machine/settings.py +++ b/examples/cache_machine/settings.py @@ -2,7 +2,7 @@ CACHES = { 'default': { - 'BACKEND': 'caching.backends.memcached.MemcachedCache', + 'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache', 'LOCATION': 'localhost:11211', }, } diff --git a/requirements/base.txt b/requirements/base.txt index 8fc8122..f9a98f4 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -1,5 +1,4 @@ # These are the reqs to build docs and run tests. 
---no-binary :all: # workaround for https://bitbucket.org/ned/coveragepy/issue/382/pip-install-coverage-uses-slower-pytracer sphinx django-nose jinja2 diff --git a/setup.py b/setup.py index d0b6afa..f6559e8 100644 --- a/setup.py +++ b/setup.py @@ -26,11 +26,11 @@ 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', + 'Programming Language :: Python :: 3.5', + 'Programming Language :: Python :: 3.6', 'Topic :: Software Development :: Libraries :: Python Modules', ] ) diff --git a/tests/test_cache.py b/tests/test_cache.py index 5a86588..d59173a 100644 --- a/tests/test_cache.py +++ b/tests/test_cache.py @@ -1,22 +1,19 @@ from __future__ import unicode_literals -import django import jinja2 import logging import pickle import sys +import unittest from django.conf import settings +from django.core.cache.backends.base import DEFAULT_TIMEOUT from django.test import TestCase, TransactionTestCase from django.utils import translation, encoding -from caching import base, invalidation, config, compat +from caching import base, invalidation, config from .testapp.models import Addon, User -if sys.version_info < (2, 7): - import unittest2 as unittest -else: - import unittest if sys.version_info >= (3, ): from unittest import mock @@ -27,22 +24,6 @@ cache = invalidation.cache log = logging.getLogger(__name__) -if django.get_version().startswith('1.3'): - class settings_patch(object): - def __init__(self, **kwargs): - self.options = kwargs - - def __enter__(self): - self._old_settings = dict((k, getattr(settings, k, None)) for k in self.options) - for k, v in list(self.options.items()): - setattr(settings, k, v) - - def __exit__(self, *args): - for k in self.options: - setattr(settings, k, self._old_settings[k]) - 
- TestCase.settings = settings_patch - class CachingTestCase(TestCase): fixtures = ['tests/testapp/fixtures/testapp/test_cache.json'] @@ -423,7 +404,7 @@ def test_infinite_timeout(self, mock_set): """ Test that memcached infinite timeouts work with all Django versions. """ - cache.set('foo', 'bar', timeout=compat.FOREVER) + cache.set('foo', 'bar', timeout=None) # for memcached, 0 timeout means store forever mock_set.assert_called_with(':1:foo', 'bar', 0) @@ -546,7 +527,7 @@ def test_pickle_queryset(self): # pickled/unpickled on/from different Python processes which may have different # underlying values for DEFAULT_TIMEOUT: q1 = Addon.objects.all() - self.assertEqual(q1.timeout, compat.DEFAULT_TIMEOUT) + self.assertEqual(q1.timeout, DEFAULT_TIMEOUT) pickled = pickle.dumps(q1) new_timeout = object() with mock.patch('caching.base.DEFAULT_TIMEOUT', new_timeout): From 36589e98655289c15ecd71180daaa0f2b09e402e Mon Sep 17 00:00:00 2001 From: Vinod Kurup Date: Fri, 6 Oct 2017 23:08:25 -0400 Subject: [PATCH 166/214] Remove obsolete package --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index f6559e8..519a5b7 100644 --- a/setup.py +++ b/setup.py @@ -13,7 +13,7 @@ author_email='jbalogh@mozilla.com', url='http://github.com/django-cache-machine/django-cache-machine', license='BSD', - packages=['caching', 'caching.backends'], + packages=['caching'], include_package_data=True, zip_safe=False, classifiers=[ From f6839e4274e56f3cce892faa017f8c33b40a1ad0 Mon Sep 17 00:00:00 2001 From: Vinod Kurup Date: Fri, 6 Oct 2017 23:25:27 -0400 Subject: [PATCH 167/214] Django 1.9 support --- .travis.yml | 2 +- README.rst | 2 +- caching/base.py | 32 +++++++++++++++++++++++--------- 3 files changed, 25 insertions(+), 11 deletions(-) diff --git a/.travis.yml b/.travis.yml index 03dca3a..ff230c2 100644 --- a/.travis.yml +++ b/.travis.yml @@ -24,7 +24,7 @@ after_success: - coveralls env: - TOX_ENV="dj18-py27,dj18-py34,dj18-py35" - # - 
TOX_ENV="dj19-py27,dj19-py34,dj19-py35,dj19-py36" + - TOX_ENV="dj19-py27,dj19-py34,dj19-py35,dj19-py36" # - TOX_ENV="dj110-py27,dj110-py34,dj110-py35,dj110-py36" # - TOX_ENV="dj111-py27,dj111-py34,dj111-py35,dj111-py36" - TOX_ENV="py27-flake8,py36-flake8" diff --git a/README.rst b/README.rst index 8a61283..3113605 100644 --- a/README.rst +++ b/README.rst @@ -17,7 +17,7 @@ For full docs, see https://cache-machine.readthedocs.org/en/latest/. Requirements ------------ -Cache Machine works with Django 1.8 and Python 2.7, 3.4, 3.5 and 3.6. +Cache Machine works with Django 1.8-1.9 and Python 2.7, 3.4, 3.5 and 3.6. Installation diff --git a/caching/base.py b/caching/base.py index 57853dc..4d44e05 100644 --- a/caching/base.py +++ b/caching/base.py @@ -9,6 +9,13 @@ from django.db.models.sql import query, EmptyResultSet from django.utils import encoding +try: + from django.db.models.query import ValuesListIterable +except ImportError: + # ValuesListIterable is defined in Django 1.9+, and if it's present, we + # need to workaround a possible infinite recursion. See CachingQuerySet.iterator() + ValuesListIterable = None + from caching import config from .invalidation import invalidator, flush_key, make_key, byid, cache @@ -162,15 +169,22 @@ def iterator(self): iterator = super(CachingQuerySet, self).iterator if self.timeout == config.NO_CACHE: return iter(iterator()) - else: - try: - # Work-around for Django #12717. - query_string = self.query_key() - except query.EmptyResultSet: - return iterator() - if config.FETCH_BY_ID: - iterator = self.fetch_by_id - return iter(CacheMachine(self.model, query_string, iterator, self.timeout, db=self.db)) + + try: + # Work-around for Django #12717. + query_string = self.query_key() + except query.EmptyResultSet: + return iterator() + if config.FETCH_BY_ID: + # fetch_by_id uses a ValuesList to get a list of pks. 
If we are + # currently about to run that query, we DON'T want to use the + # fetch_by_id iterator or else we will run into an infinite + # recursion. So, if we are about to run that query, use the + # standard iterator. + if ValuesListIterable and self._iterable_class == ValuesListIterable: + return iter(iterator()) + iterator = self.fetch_by_id + return iter(CacheMachine(self.model, query_string, iterator, self.timeout, db=self.db)) def fetch_by_id(self): """ From 3b562d4358e15ff6c7e7d75817653bda901ac625 Mon Sep 17 00:00:00 2001 From: Vinod Kurup Date: Fri, 6 Oct 2017 23:33:35 -0400 Subject: [PATCH 168/214] Django 1.10 support --- README.rst | 2 +- caching/ext.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/README.rst b/README.rst index 3113605..4837e8f 100644 --- a/README.rst +++ b/README.rst @@ -17,7 +17,7 @@ For full docs, see https://cache-machine.readthedocs.org/en/latest/. Requirements ------------ -Cache Machine works with Django 1.8-1.9 and Python 2.7, 3.4, 3.5 and 3.6. +Cache Machine works with Django 1.8-1.10 and Python 2.7, 3.4, 3.5 and 3.6. 
Installation diff --git a/caching/ext.py b/caching/ext.py index acdc226..c5b5e71 100644 --- a/caching/ext.py +++ b/caching/ext.py @@ -69,7 +69,7 @@ def process_cache_arguments(self, args): def _cache_support(self, name, obj, timeout, extra, caller): """Cache helper callback.""" - if settings.TEMPLATE_DEBUG: + if settings.DEBUG: return caller() extra = ':'.join(map(encoding.smart_str, extra)) key = 'fragment:%s:%s' % (name, extra) From b7295ff8409b0aeb0b8c7f3c592656507b855a6b Mon Sep 17 00:00:00 2001 From: Vinod Kurup Date: Fri, 6 Oct 2017 23:34:40 -0400 Subject: [PATCH 169/214] Tell Travis to test 1.10 --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index ff230c2..4690a28 100644 --- a/.travis.yml +++ b/.travis.yml @@ -25,7 +25,7 @@ after_success: env: - TOX_ENV="dj18-py27,dj18-py34,dj18-py35" - TOX_ENV="dj19-py27,dj19-py34,dj19-py35,dj19-py36" - # - TOX_ENV="dj110-py27,dj110-py34,dj110-py35,dj110-py36" + - TOX_ENV="dj110-py27,dj110-py34,dj110-py35,dj110-py36" # - TOX_ENV="dj111-py27,dj111-py34,dj111-py35,dj111-py36" - TOX_ENV="py27-flake8,py36-flake8" - TOX_ENV="docs" From 6a547d8e4295985b2a84d4e115b1192cff540dbc Mon Sep 17 00:00:00 2001 From: Vinod Kurup Date: Sat, 7 Oct 2017 10:03:09 -0400 Subject: [PATCH 170/214] Django 1.11 support --- .travis.yml | 2 +- README.rst | 2 +- caching/base.py | 12 ++++++++++++ 3 files changed, 14 insertions(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index 4690a28..603232c 100644 --- a/.travis.yml +++ b/.travis.yml @@ -26,7 +26,7 @@ env: - TOX_ENV="dj18-py27,dj18-py34,dj18-py35" - TOX_ENV="dj19-py27,dj19-py34,dj19-py35,dj19-py36" - TOX_ENV="dj110-py27,dj110-py34,dj110-py35,dj110-py36" - # - TOX_ENV="dj111-py27,dj111-py34,dj111-py35,dj111-py36" + - TOX_ENV="dj111-py27,dj111-py34,dj111-py35,dj111-py36" - TOX_ENV="py27-flake8,py36-flake8" - TOX_ENV="docs" # Adding sudo: False tells Travis to use their container-based infrastructure, which is somewhat faster. 
diff --git a/README.rst b/README.rst index 4837e8f..1475cce 100644 --- a/README.rst +++ b/README.rst @@ -17,7 +17,7 @@ For full docs, see https://cache-machine.readthedocs.org/en/latest/. Requirements ------------ -Cache Machine works with Django 1.8-1.10 and Python 2.7, 3.4, 3.5 and 3.6. +Cache Machine works with Django 1.8-1.11 and Python 2.7, 3.4, 3.5 and 3.6. Installation diff --git a/caching/base.py b/caching/base.py index 4d44e05..f4ad1ed 100644 --- a/caching/base.py +++ b/caching/base.py @@ -157,6 +157,18 @@ def __setstate__(self, state): if self.timeout == self._default_timeout_pickle_key: self.timeout = DEFAULT_TIMEOUT + def _fetch_all(self): + """ + Django 1.11 changed _fetch_all to use self._iterable_class() rather than + self.iterator(). That bypasses our iterator, so override Queryset._fetch_all + to use our iterator. + + https://github.com/django/django/commit/f3b7c059367a4e82bbfc7e4f0d42b10975e79f0c#diff-5b0dda5eb9a242c15879dc9cd2121379 + """ + if self._result_cache is None: + self._result_cache = list(self.iterator()) + super(CachingQuerySet, self)._fetch_all() + def flush_key(self): return flush_key(self.query_key()) From 7bd4f770990f379155f97bd94c70cda9b205e78e Mon Sep 17 00:00:00 2001 From: Vinod Kurup Date: Sat, 7 Oct 2017 10:03:22 -0400 Subject: [PATCH 171/214] Cleanup and docs updates --- caching/base.py | 10 +--------- docs/index.rst | 5 ++--- docs/releases.rst | 21 ++++++++++++++++++++- requirements/base.txt | 1 + requirements/py2.txt | 2 -- requirements/py3.txt | 1 - 6 files changed, 24 insertions(+), 16 deletions(-) diff --git a/caching/base.py b/caching/base.py index f4ad1ed..bccb853 100644 --- a/caching/base.py +++ b/caching/base.py @@ -17,17 +17,9 @@ ValuesListIterable = None from caching import config -from .invalidation import invalidator, flush_key, make_key, byid, cache - - -class NullHandler(logging.Handler): - - def emit(self, record): - pass - +from caching.invalidation import invalidator, flush_key, make_key, byid, cache log = 
logging.getLogger('caching') -log.addHandler(NullHandler()) class CachingManager(models.Manager): diff --git a/docs/index.rst b/docs/index.rst index aff85a7..655f464 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -14,9 +14,8 @@ affect you, please see the :ref:`release-notes`. Settings -------- -Older versions of Cache Machine required you to use customized cache backends. As of Django 1.6, -these are no longer needed and they have been removed from Cache Machine. Use the standard Django -cache backends. +Older versions of Cache Machine required you to use customized cache backends. These are no longer +needed and they have been removed from Cache Machine. Use the standard Django cache backends. COUNT queries ^^^^^^^^^^^^^ diff --git a/docs/releases.rst b/docs/releases.rst index a047f62..6e8f64b 100644 --- a/docs/releases.rst +++ b/docs/releases.rst @@ -3,8 +3,27 @@ Release Notes ================== +v1.0.0dev (TBD) +--------------- + +- Update Travis and Tox configurations +- Drop support for Python < 2.7 +- Add support for Python 3.5 and 3.6 +- Drop support for Django < 1.8 +- Add support for Django 1.9, 1.10, and 1.11 +- Removed all custom cache backends. +- Flake8 fixes + +Backwards Incompatible Changes +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +- Cache Machine previously included custom backends for LocMem, Memcached and PyLibMemcached. These + were necessary because the core backends in old versions of Django did not support infinte + timeouts. They now do, so Cache Machine's custom backends are no longer necessary. They have been + removed, so you should revert to using the core Django backends. 
+ v0.9.1 (2015-10-22) ------------------ +------------------- - Fix bug that prevented objects retrieved via cache machine from being re-cached by application code (see PR #103) diff --git a/requirements/base.txt b/requirements/base.txt index f9a98f4..ed304fb 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -6,3 +6,4 @@ redis flake8 coverage psycopg2 +python-memcached>=1.58 diff --git a/requirements/py2.txt b/requirements/py2.txt index 3bf2f67..032a559 100644 --- a/requirements/py2.txt +++ b/requirements/py2.txt @@ -1,4 +1,2 @@ -r base.txt -python-memcached mock==1.0.1 -unittest2 diff --git a/requirements/py3.txt b/requirements/py3.txt index ced3eed..a3e81b8 100644 --- a/requirements/py3.txt +++ b/requirements/py3.txt @@ -1,2 +1 @@ -r base.txt -python3-memcached From 24c977923ab5fee5525fb5fea0df6b4859e53316 Mon Sep 17 00:00:00 2001 From: Vinod Kurup Date: Sat, 7 Oct 2017 23:29:12 -0400 Subject: [PATCH 172/214] Add tests for failure when running .values() or .values_list() --- caching/base.py | 19 ++++++++----------- tests/test_cache.py | 10 ++++++++++ 2 files changed, 18 insertions(+), 11 deletions(-) diff --git a/caching/base.py b/caching/base.py index bccb853..8415fc2 100644 --- a/caching/base.py +++ b/caching/base.py @@ -10,11 +10,11 @@ from django.utils import encoding try: - from django.db.models.query import ValuesListIterable + from django.db.models.query import ModelIterable except ImportError: - # ValuesListIterable is defined in Django 1.9+, and if it's present, we + # ModelIterable is defined in Django 1.9+, and if it's present, we # need to workaround a possible infinite recursion. 
See CachingQuerySet.iterator() - ValuesListIterable = None + ModelIterable = None from caching import config from caching.invalidation import invalidator, flush_key, make_key, byid, cache @@ -173,20 +173,17 @@ def iterator(self): iterator = super(CachingQuerySet, self).iterator if self.timeout == config.NO_CACHE: return iter(iterator()) - + # ModelIterable and _iterable_class are introduced in Django 1.9. We only cache + # ModelIterable querysets (because we mark each instance as being cached with a `from_cache` + # attribute, and can't do so with dictionaries or tuples) + if getattr(self, '_iterable_class', None) != ModelIterable: + return iter(iterator()) try: # Work-around for Django #12717. query_string = self.query_key() except query.EmptyResultSet: return iterator() if config.FETCH_BY_ID: - # fetch_by_id uses a ValuesList to get a list of pks. If we are - # currently about to run that query, we DON'T want to use the - # fetch_by_id iterator or else we will run into an infinite - # recursion. So, if we are about to run that query, use the - # standard iterator. 
- if ValuesListIterable and self._iterable_class == ValuesListIterable: - return iter(iterator()) iterator = self.fetch_by_id return iter(CacheMachine(self.model, query_string, iterator, self.timeout, db=self.db)) diff --git a/tests/test_cache.py b/tests/test_cache.py index d59173a..1186a13 100644 --- a/tests/test_cache.py +++ b/tests/test_cache.py @@ -63,6 +63,16 @@ def test_slice_cache(self): self.assertIs(Addon.objects.filter(id=1)[:1][0].from_cache, False) self.assertIs(Addon.objects.filter(id=1)[:1][0].from_cache, True) + def test_should_not_cache_values1(self): + with self.assertNumQueries(2): + Addon.objects.values('id')[0] + Addon.objects.values('id')[0] + + def test_should_not_cache_values_list(self): + with self.assertNumQueries(2): + Addon.objects.values_list('id')[0] + Addon.objects.values_list('id')[0] + def test_invalidation(self): self.assertIs(Addon.objects.get(id=1).from_cache, False) a = [x for x in Addon.objects.all() if x.id == 1][0] From f98e32fd9f8e838fd088b41be21dd65860cada42 Mon Sep 17 00:00:00 2001 From: Vinod Kurup Date: Mon, 9 Oct 2017 08:31:17 -0400 Subject: [PATCH 173/214] Typo --- tests/test_cache.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_cache.py b/tests/test_cache.py index 1186a13..8a87246 100644 --- a/tests/test_cache.py +++ b/tests/test_cache.py @@ -63,7 +63,7 @@ def test_slice_cache(self): self.assertIs(Addon.objects.filter(id=1)[:1][0].from_cache, False) self.assertIs(Addon.objects.filter(id=1)[:1][0].from_cache, True) - def test_should_not_cache_values1(self): + def test_should_not_cache_values(self): with self.assertNumQueries(2): Addon.objects.values('id')[0] Addon.objects.values('id')[0] From 4f1fb8e2b0c75e9d19641bc46cc681fb49c64d54 Mon Sep 17 00:00:00 2001 From: Vinod Kurup Date: Wed, 11 Oct 2017 15:19:25 -0400 Subject: [PATCH 174/214] Attempt to tease out Django 1.8 differences --- caching/base.py | 146 ++++++++++++++++++++++++-------------------- tests/test_cache.py | 12 ++-- 2 files 
changed, 86 insertions(+), 72 deletions(-) diff --git a/caching/base.py b/caching/base.py index 8415fc2..c9a4d9b 100644 --- a/caching/base.py +++ b/caching/base.py @@ -9,16 +9,16 @@ from django.db.models.sql import query, EmptyResultSet from django.utils import encoding +from caching import config +from caching.invalidation import invalidator, flush_key, make_key, byid, cache + try: from django.db.models.query import ModelIterable except ImportError: # ModelIterable is defined in Django 1.9+, and if it's present, we - # need to workaround a possible infinite recursion. See CachingQuerySet.iterator() + # use it iterate over our results. If not, we fall back to a Django 1.8 compatible way. ModelIterable = None -from caching import config -from caching.invalidation import invalidator, flush_key, make_key, byid, cache - log = logging.getLogger('caching') @@ -57,21 +57,11 @@ def no_cache(self): return self.cache(config.NO_CACHE) -class CacheMachine(object): +class CacheInternalCommonMixin(object): """ - Handles all the cache management for a QuerySet. - - Takes the string representation of a query and a function that can be - called to get an iterator over some database results. + A set of methods common to our Django 1.8 and Django 1.9+ iterators. """ - def __init__(self, model, query_string, iter_function, timeout=DEFAULT_TIMEOUT, db='default'): - self.model = model - self.query_string = query_string - self.iter_function = iter_function - self.timeout = timeout - self.db = db - def query_key(self): """ Generate the cache key for this query. @@ -81,16 +71,39 @@ def query_key(self): master), throwing a Django ValueError in the process. Django prevents cross DB model saving among related objects. 
""" - query_db_string = 'qs:%s::db:%s' % (self.query_string, self.db) + query_db_string = 'qs:%s::db:%s' % (self.queryset.query_key(), self.db) return make_key(query_db_string, with_locale=False) + def cache_objects(self, objects, query_key): + """Cache query_key => objects, then update the flush lists.""" + log.debug('query_key: %s' % query_key) + query_flush = flush_key(self.queryset.query_key()) + log.debug('query_flush: %s' % query_flush) + cache.add(query_key, objects, timeout=self.timeout) + invalidator.cache_objects(self.queryset.model, objects, query_key, query_flush) + def __iter__(self): + if hasattr(super(CacheInternalCommonMixin, self), '__iter__'): + # This is the Django 1.9+ class, so we'll use super().__iter__ + # which is a ModelIterable iterator. + iterator = super(CacheInternalCommonMixin, self).__iter__ + else: + # This is Django 1.8. Use the function passed into the class + # constructor. + iterator = self.iter_function + + if self.timeout == config.NO_CACHE: + # no cache, just iterate and return the results + for obj in iterator(): + yield obj + return + + # Try to fetch from the cache. try: query_key = self.query_key() except query.EmptyResultSet: raise StopIteration - # Try to fetch from the cache. cached = cache.get(query_key) if cached is not None: log.debug('cache hit: %s' % query_key) @@ -99,28 +112,47 @@ def __iter__(self): yield obj return - # Do the database query, cache it once we have all the objects. - iterator = self.iter_function() + # Use the special FETCH_BY_ID iterator if configured. + if config.FETCH_BY_ID and hasattr(self.queryset, 'fetch_by_id'): + iterator = self.queryset.fetch_by_id + # No cached results. Do the database query, and cache it once we have + # all the objects. 
to_cache = [] - try: - while True: - obj = next(iterator) - obj.from_cache = False - to_cache.append(obj) - yield obj - except StopIteration: - if to_cache or config.CACHE_EMPTY_QUERYSETS: - self.cache_objects(to_cache, query_key) - raise + for obj in iterator(): + obj.from_cache = False + to_cache.append(obj) + yield obj + if to_cache or config.CACHE_EMPTY_QUERYSETS: + self.cache_objects(to_cache, query_key) - def cache_objects(self, objects, query_key): - """Cache query_key => objects, then update the flush lists.""" - log.debug('query_key: %s' % query_key) - query_flush = flush_key(self.query_string) - log.debug('query_flush: %s' % query_flush) - cache.add(query_key, objects, timeout=self.timeout) - invalidator.cache_objects(self.model, objects, query_key, query_flush) + +class CacheMachine(CacheInternalCommonMixin): + """ + Handles all the cache management for a QuerySet. + + Takes the string representation of a query and a function that can be + called to get an iterator over some database results. + """ + + def __init__(self, queryset, iter_function=None, timeout=DEFAULT_TIMEOUT, db='default'): + self.queryset = queryset + self.iter_function = iter_function + self.timeout = timeout + self.db = db + + +if ModelIterable: + class CachingModelIterable(CacheInternalCommonMixin, ModelIterable): + """ + A version of Django's ModelIterable that first tries to get results from the cache. 
+ """ + + def __init__(self, *args, **kwargs): + super(CachingModelIterable, self).__init__(*args, **kwargs) + # copy timeout and db from queryset to allow CacheInternalCommonMixin to be DRYer + self.timeout = self.queryset.timeout + self.db = self.queryset.db class CachingQuerySet(models.query.QuerySet): @@ -130,6 +162,9 @@ class CachingQuerySet(models.query.QuerySet): def __init__(self, *args, **kw): super(CachingQuerySet, self).__init__(*args, **kw) self.timeout = DEFAULT_TIMEOUT + if ModelIterable: + # Django 1.9+ + self._iterable_class = CachingModelIterable def __getstate__(self): """ @@ -149,18 +184,6 @@ def __setstate__(self, state): if self.timeout == self._default_timeout_pickle_key: self.timeout = DEFAULT_TIMEOUT - def _fetch_all(self): - """ - Django 1.11 changed _fetch_all to use self._iterable_class() rather than - self.iterator(). That bypasses our iterator, so override Queryset._fetch_all - to use our iterator. - - https://github.com/django/django/commit/f3b7c059367a4e82bbfc7e4f0d42b10975e79f0c#diff-5b0dda5eb9a242c15879dc9cd2121379 - """ - if self._result_cache is None: - self._result_cache = list(self.iterator()) - super(CachingQuerySet, self)._fetch_all() - def flush_key(self): return flush_key(self.query_key()) @@ -170,22 +193,11 @@ def query_key(self): return sql % params def iterator(self): + if ModelIterable: + # Django 1.9+ + return self._iterable_class(self) iterator = super(CachingQuerySet, self).iterator - if self.timeout == config.NO_CACHE: - return iter(iterator()) - # ModelIterable and _iterable_class are introduced in Django 1.9. We only cache - # ModelIterable querysets (because we mark each instance as being cached with a `from_cache` - # attribute, and can't do so with dictionaries or tuples) - if getattr(self, '_iterable_class', None) != ModelIterable: - return iter(iterator()) - try: - # Work-around for Django #12717. 
- query_string = self.query_key() - except query.EmptyResultSet: - return iterator() - if config.FETCH_BY_ID: - iterator = self.fetch_by_id - return iter(CacheMachine(self.model, query_string, iterator, self.timeout, db=self.db)) + return iter(CacheMachine(self, iterator, self.timeout, db=self.db)) def fetch_by_id(self): """ @@ -320,11 +332,13 @@ def __iter__(self): while True: yield next(iterator) else: - sql = self.raw_query % tuple(self.params) - for obj in CacheMachine(self.model, sql, iterator, timeout=self.timeout): + for obj in CacheMachine(self, iterator, timeout=self.timeout): yield obj raise StopIteration + def query_key(self): + return self.raw_query % tuple(self.params) + def _function_cache_key(key): return make_key('f:%s' % key, with_locale=True) diff --git a/tests/test_cache.py b/tests/test_cache.py index 8a87246..25f40cb 100644 --- a/tests/test_cache.py +++ b/tests/test_cache.py @@ -378,19 +378,19 @@ def test_cached_method(self): # Make sure we're updating the wrapper's docstring. 
self.assertEqual(b.calls.__doc__, Addon.calls.__doc__) - @mock.patch('caching.base.CacheMachine') - def test_no_cache_from_manager(self, CacheMachine): + @mock.patch('caching.base.cache.get') + def test_no_cache_from_manager(self, mock_cache): a = Addon.objects.no_cache().get(id=1) self.assertEqual(a.id, 1) self.assertFalse(hasattr(a, 'from_cache')) - self.assertFalse(CacheMachine.called) + self.assertFalse(mock_cache.called) - @mock.patch('caching.base.CacheMachine') - def test_no_cache_from_queryset(self, CacheMachine): + @mock.patch('caching.base.cache.get') + def test_no_cache_from_queryset(self, mock_cache): a = Addon.objects.all().no_cache().get(id=1) self.assertEqual(a.id, 1) self.assertFalse(hasattr(a, 'from_cache')) - self.assertFalse(CacheMachine.called) + self.assertFalse(mock_cache.called) def test_timeout_from_manager(self): q = Addon.objects.cache(12).filter(id=1) From 085bf3552c66a017464f2806837484b9f47e8295 Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Thu, 12 Oct 2017 18:57:44 -0400 Subject: [PATCH 175/214] enable --keepdb for faster test runs --- run_tests.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/run_tests.py b/run_tests.py index d33f08f..c4f7d33 100644 --- a/run_tests.py +++ b/run_tests.py @@ -47,7 +47,7 @@ def main(): test_cmd = ['coverage', 'run'] else: test_cmd = [] - test_cmd += [django_admin, 'test'] + test_cmd += [django_admin, 'test', '--keepdb'] results.append(call(test_cmd)) if args.with_coverage: results.append(call(['coverage', 'report', '-m', '--fail-under', '70'])) From 52d4ea6abc79946f737cc9a711ba95ca291653ab Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Thu, 12 Oct 2017 19:03:12 -0400 Subject: [PATCH 176/214] try to simplify Django 1.8/Django 1.11 compatibility --- caching/base.py | 81 +++++++++++++++++---------------------------- tests/test_cache.py | 6 ++-- 2 files changed, 33 insertions(+), 54 deletions(-) diff --git a/caching/base.py b/caching/base.py index c9a4d9b..a1a87ea 100644 --- 
a/caching/base.py +++ b/caching/base.py @@ -13,11 +13,17 @@ from caching.invalidation import invalidator, flush_key, make_key, byid, cache try: + # ModelIterable is defined in Django 1.9+, and if it's present, we use it + # iterate over our results. from django.db.models.query import ModelIterable except ImportError: - # ModelIterable is defined in Django 1.9+, and if it's present, we - # use it iterate over our results. If not, we fall back to a Django 1.8 compatible way. - ModelIterable = None + # If not, define a Django 1.8-compatible stub we can use instead. + class ModelIterable(object): + def __init__(self, queryset): + self.queryset = queryset + + def __iter__(self): + return super(CachingQuerySet, self.queryset).iterator() log = logging.getLogger('caching') @@ -57,11 +63,20 @@ def no_cache(self): return self.cache(config.NO_CACHE) -class CacheInternalCommonMixin(object): +class CachingModelIterable(ModelIterable): """ - A set of methods common to our Django 1.8 and Django 1.9+ iterators. + Handles all the cache management for a QuerySet. + + Takes the string representation of a query and a function that can be + called to get an iterator over some database results. """ + def __init__(self, queryset, *args, **kwargs): + self.iter_function = kwargs.pop('iter_function', None) + self.timeout = kwargs.pop('timeout', queryset.timeout) + self.db = kwargs.pop('db', queryset.db) + super(CachingModelIterable, self).__init__(queryset, *args, **kwargs) + def query_key(self): """ Generate the cache key for this query. @@ -83,14 +98,13 @@ def cache_objects(self, objects, query_key): invalidator.cache_objects(self.queryset.model, objects, query_key, query_flush) def __iter__(self): - if hasattr(super(CacheInternalCommonMixin, self), '__iter__'): - # This is the Django 1.9+ class, so we'll use super().__iter__ - # which is a ModelIterable iterator. - iterator = super(CacheInternalCommonMixin, self).__iter__ - else: - # This is Django 1.8. 
Use the function passed into the class - # constructor. + if self.iter_function is not None: + # This a RawQuerySet. Use the function passed into + # the class constructor. iterator = self.iter_function + else: + # Otherwise, use super().__iter__. + iterator = super(CachingModelIterable, self).__iter__ if self.timeout == config.NO_CACHE: # no cache, just iterate and return the results @@ -102,7 +116,7 @@ def __iter__(self): try: query_key = self.query_key() except query.EmptyResultSet: - raise StopIteration + return cached = cache.get(query_key) if cached is not None: @@ -127,34 +141,6 @@ def __iter__(self): self.cache_objects(to_cache, query_key) -class CacheMachine(CacheInternalCommonMixin): - """ - Handles all the cache management for a QuerySet. - - Takes the string representation of a query and a function that can be - called to get an iterator over some database results. - """ - - def __init__(self, queryset, iter_function=None, timeout=DEFAULT_TIMEOUT, db='default'): - self.queryset = queryset - self.iter_function = iter_function - self.timeout = timeout - self.db = db - - -if ModelIterable: - class CachingModelIterable(CacheInternalCommonMixin, ModelIterable): - """ - A version of Django's ModelIterable that first tries to get results from the cache. 
- """ - - def __init__(self, *args, **kwargs): - super(CachingModelIterable, self).__init__(*args, **kwargs) - # copy timeout and db from queryset to allow CacheInternalCommonMixin to be DRYer - self.timeout = self.queryset.timeout - self.db = self.queryset.db - - class CachingQuerySet(models.query.QuerySet): _default_timeout_pickle_key = '__DEFAULT_TIMEOUT__' @@ -162,9 +148,7 @@ class CachingQuerySet(models.query.QuerySet): def __init__(self, *args, **kw): super(CachingQuerySet, self).__init__(*args, **kw) self.timeout = DEFAULT_TIMEOUT - if ModelIterable: - # Django 1.9+ - self._iterable_class = CachingModelIterable + self._iterable_class = CachingModelIterable def __getstate__(self): """ @@ -193,11 +177,7 @@ def query_key(self): return sql % params def iterator(self): - if ModelIterable: - # Django 1.9+ - return self._iterable_class(self) - iterator = super(CachingQuerySet, self).iterator - return iter(CacheMachine(self, iterator, self.timeout, db=self.db)) + return self._iterable_class(self) def fetch_by_id(self): """ @@ -332,9 +312,8 @@ def __iter__(self): while True: yield next(iterator) else: - for obj in CacheMachine(self, iterator, timeout=self.timeout): + for obj in CachingModelIterable(self, iter_function=iterator, timeout=self.timeout): yield obj - raise StopIteration def query_key(self): return self.raw_query % tuple(self.params) diff --git a/tests/test_cache.py b/tests/test_cache.py index 25f40cb..50fd773 100644 --- a/tests/test_cache.py +++ b/tests/test_cache.py @@ -165,15 +165,15 @@ def test_raw_cache_params(self): raw2 = list(Addon.objects.raw(sql, [2]))[0] self.assertEqual(raw2.id, 2) - @mock.patch('caching.base.CacheMachine') - def test_raw_nocache(self, CacheMachine): + @mock.patch('caching.base.CachingModelIterable') + def test_raw_nocache(self, CachingModelIterable): base.TIMEOUT = 60 sql = 'SELECT * FROM %s WHERE id = 1' % Addon._meta.db_table raw = list(Addon.objects.raw(sql, timeout=config.NO_CACHE)) self.assertEqual(len(raw), 1) raw_addon 
= raw[0] self.assertFalse(hasattr(raw_addon, 'from_cache')) - self.assertFalse(CacheMachine.called) + self.assertFalse(CachingModelIterable.called) @mock.patch('caching.base.cache') def test_count_cache(self, cache_mock): From 096f2b2c7b9e8604b728eb53cddc0e2ac6f4f8e5 Mon Sep 17 00:00:00 2001 From: Vinod Kurup Date: Fri, 13 Oct 2017 11:43:53 -0400 Subject: [PATCH 177/214] Minor doc updates --- caching/base.py | 5 +++-- docs/index.rst | 2 +- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/caching/base.py b/caching/base.py index a1a87ea..0b78ef8 100644 --- a/caching/base.py +++ b/caching/base.py @@ -67,8 +67,9 @@ class CachingModelIterable(ModelIterable): """ Handles all the cache management for a QuerySet. - Takes the string representation of a query and a function that can be - called to get an iterator over some database results. + Takes a queryset, and optionally takes a function that can be called to + get an iterator over some database results. The function is only needed + for RawQuerySets currently. """ def __init__(self, queryset, *args, **kwargs): diff --git a/docs/index.rst b/docs/index.rst index 655f464..960c210 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -202,7 +202,7 @@ file, replacing ``localhost`` with the hostname of your Redis server:: Classes That May Interest You ----------------------------- -.. autoclass:: caching.base.CacheMachine +.. autoclass:: caching.base.CachingModelIterable .. 
autoclass:: caching.base.CachingManager :members: From 251e5e6bfef49771c988f73a923c132a6ecc453d Mon Sep 17 00:00:00 2001 From: Vinod Kurup Date: Fri, 13 Oct 2017 12:44:15 -0400 Subject: [PATCH 178/214] Bump version for release --- caching/__init__.py | 2 +- docs/releases.rst | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/caching/__init__.py b/caching/__init__.py index 88bdfc2..3d38e44 100644 --- a/caching/__init__.py +++ b/caching/__init__.py @@ -1,4 +1,4 @@ from __future__ import unicode_literals -VERSION = ('0', '9', '1') +VERSION = ('1', '0', '0') __version__ = '.'.join(VERSION) diff --git a/docs/releases.rst b/docs/releases.rst index 6e8f64b..2094e79 100644 --- a/docs/releases.rst +++ b/docs/releases.rst @@ -3,8 +3,8 @@ Release Notes ================== -v1.0.0dev (TBD) ---------------- +v1.0.0 (2017-10-13) +------------------- - Update Travis and Tox configurations - Drop support for Python < 2.7 From 3b927578f3994185c8fb88ee556abcfcba52783b Mon Sep 17 00:00:00 2001 From: Tim Dawborn Date: Wed, 10 Feb 2016 21:21:11 +1100 Subject: [PATCH 179/214] Added a test case when Django is using django-redis as its caching backend. --- examples/cache_machine/django_redis_settings.py | 8 ++++++++ requirements/base.txt | 1 + run_tests.py | 1 + 3 files changed, 10 insertions(+) create mode 100644 examples/cache_machine/django_redis_settings.py diff --git a/examples/cache_machine/django_redis_settings.py b/examples/cache_machine/django_redis_settings.py new file mode 100644 index 0000000..257c837 --- /dev/null +++ b/examples/cache_machine/django_redis_settings.py @@ -0,0 +1,8 @@ +from .redis_settings import * # flake8: noqa + +CACHES = { + 'default': { + 'BACKEND': 'django_redis.cache.RedisCache', + 'LOCATION': 'redis://127.0.0.1:6379/0', + }, +} diff --git a/requirements/base.txt b/requirements/base.txt index ed304fb..f1b6add 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -1,6 +1,7 @@ # These are the reqs to build docs and run tests. 
sphinx django-nose +django-redis jinja2 redis flake8 diff --git a/run_tests.py b/run_tests.py index c4f7d33..1dd72ff 100644 --- a/run_tests.py +++ b/run_tests.py @@ -22,6 +22,7 @@ 'custom_backend', 'redis_settings', 'redis_byid', + 'django_redis_settings', ) From 6b037e302d1d2d1cd23b4f896cd1b27ed55845d4 Mon Sep 17 00:00:00 2001 From: Tim Dawborn Date: Wed, 10 Feb 2016 22:05:40 +1100 Subject: [PATCH 180/214] Only run django-redis test case when on Python >= 2.7. --- run_tests.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/run_tests.py b/run_tests.py index 1dd72ff..9dec757 100644 --- a/run_tests.py +++ b/run_tests.py @@ -15,15 +15,16 @@ os.environ['PYTHONPATH'] = os.pathsep.join([ROOT, os.path.join(ROOT, 'examples')]) -SETTINGS = ( +SETTINGS = [ 'locmem_settings', 'settings', 'memcache_byid', 'custom_backend', 'redis_settings', 'redis_byid', - 'django_redis_settings', -) +] +if sys.version_info >= (2, 7): + SETTINGS.append('django_redis_settings') def main(): From 2f182f8b5334169603212b49efee779a40cfeb00 Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Fri, 13 Oct 2017 13:17:46 -0400 Subject: [PATCH 181/214] we only run tests on Python 2.7+ now --- run_tests.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/run_tests.py b/run_tests.py index 9dec757..1dd72ff 100644 --- a/run_tests.py +++ b/run_tests.py @@ -15,16 +15,15 @@ os.environ['PYTHONPATH'] = os.pathsep.join([ROOT, os.path.join(ROOT, 'examples')]) -SETTINGS = [ +SETTINGS = ( 'locmem_settings', 'settings', 'memcache_byid', 'custom_backend', 'redis_settings', 'redis_byid', -] -if sys.version_info >= (2, 7): - SETTINGS.append('django_redis_settings') + 'django_redis_settings', +) def main(): From 5ec369cdadcdd1af007c4eaeee4bbcbdee15e4d3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=EA=B9=80=EC=A0=95=EA=B7=BC?= Date: Thu, 5 Apr 2018 12:07:55 +0900 Subject: [PATCH 182/214] Django 2.0 Support --- .travis.yml | 1 + caching/base.py | 17 ++++++++++++++--- 
examples/cache_machine/settings.py | 5 +++++ tests/testapp/models.py | 4 ++-- tox.ini | 1 + 5 files changed, 23 insertions(+), 5 deletions(-) diff --git a/.travis.yml b/.travis.yml index 603232c..b205877 100644 --- a/.travis.yml +++ b/.travis.yml @@ -27,6 +27,7 @@ env: - TOX_ENV="dj19-py27,dj19-py34,dj19-py35,dj19-py36" - TOX_ENV="dj110-py27,dj110-py34,dj110-py35,dj110-py36" - TOX_ENV="dj111-py27,dj111-py34,dj111-py35,dj111-py36" + - TOX_ENV="dj200-py34,dj200-py35,dj200-py36" - TOX_ENV="py27-flake8,py36-flake8" - TOX_ENV="docs" # Adding sudo: False tells Travis to use their container-based infrastructure, which is somewhat faster. diff --git a/caching/base.py b/caching/base.py index 0b78ef8..d361a53 100644 --- a/caching/base.py +++ b/caching/base.py @@ -3,6 +3,7 @@ import functools import logging +import django from django.core.cache.backends.base import DEFAULT_TIMEOUT from django.db import models from django.db.models import signals @@ -289,15 +290,25 @@ def _cache_keys(self, incl_db=True): """Return the cache key for self plus all related foreign keys.""" fks = dict((f, getattr(self, f.attname)) for f in self._meta.fields if isinstance(f, models.ForeignKey)) - keys = [fk.rel.to._cache_key(val, incl_db and self._state.db or None) - for fk, val in list(fks.items()) - if val is not None and hasattr(fk.rel.to, '_cache_key')] + + keys = [] + for fk, val in list(fks.items()): + related_model = self._get_fk_related_model(fk) + if val is not None and hasattr(related_model, '_cache_key'): + keys.append(related_model._cache_key(val, incl_db and self._state.db or None)) + return (self.get_cache_key(incl_db=incl_db),) + tuple(keys) def _flush_keys(self): """Return the flush key for self plus all related foreign keys.""" return map(flush_key, self._cache_keys(incl_db=False)) + def _get_fk_related_model(self, fk): + if django.VERSION[0] >= 2: + return fk.remote_field.model + else: + return fk.rel.to + class CachingRawQuerySet(models.query.RawQuerySet): diff --git 
a/examples/cache_machine/settings.py b/examples/cache_machine/settings.py index 2b6027f..c909b94 100644 --- a/examples/cache_machine/settings.py +++ b/examples/cache_machine/settings.py @@ -1,5 +1,7 @@ import os +import django + CACHES = { 'default': { 'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache', @@ -46,3 +48,6 @@ 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ) + +if django.VERSION[0] >= 2: + MIDDLEWARE = MIDDLEWARE_CLASSES diff --git a/tests/testapp/models.py b/tests/testapp/models.py index 1df219d..880f9be 100644 --- a/tests/testapp/models.py +++ b/tests/testapp/models.py @@ -22,8 +22,8 @@ class User(CachingMixin, models.Model): class Addon(CachingMixin, models.Model): val = models.IntegerField() - author1 = models.ForeignKey(User) - author2 = models.ForeignKey(User, related_name='author2_set') + author1 = models.ForeignKey(User, on_delete=models.CASCADE) + author2 = models.ForeignKey(User, related_name='author2_set', on_delete=models.CASCADE) objects = CachingManager() diff --git a/tox.ini b/tox.ini index 56232f4..eeea53e 100644 --- a/tox.ini +++ b/tox.ini @@ -24,6 +24,7 @@ deps = dj19: Django>=1.9,<1.10 dj110: Django>=1.10,<1.11 dj111: Django>=1.11,<2.0 + dj200: Django>=2.0 [testenv:docs] basepython = python3.6 From fccd81fe9f2bac6b67c785cc642e5b7273a14523 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=EA=B9=80=EC=A0=95=EA=B7=BC?= Date: Thu, 5 Apr 2018 12:33:25 +0900 Subject: [PATCH 183/214] Add tox env --- tox.ini | 1 + 1 file changed, 1 insertion(+) diff --git a/tox.ini b/tox.ini index eeea53e..0f2ee2a 100644 --- a/tox.ini +++ b/tox.ini @@ -7,6 +7,7 @@ envlist = dj{18}-py{27,34,35} dj{19,110,111}-py{27,34,35,36} + dj{200}-py{34,35,36} py{27,36}-flake8 docs From d5721f72921013fe655d81fe9f7dd8770b78cadf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=EA=B9=80=EC=A0=95=EA=B7=BC?= Date: Thu, 5 Apr 2018 12:35:55 +0900 Subject: [PATCH 184/214] Remove specific python version --- 
.travis.yml | 3 --- 1 file changed, 3 deletions(-) diff --git a/.travis.yml b/.travis.yml index b205877..5b2c614 100644 --- a/.travis.yml +++ b/.travis.yml @@ -2,9 +2,6 @@ language: python services: - memcached - redis-server -python: -# python selected by tox, so specify only one version here - - "3.6" addons: postgresql: "9.5" before_install: From a67d34a46606a9ecc58a441663a9ac8ab6e9d94c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=EA=B9=80=EC=A0=95=EA=B7=BC?= Date: Fri, 6 Apr 2018 17:25:53 +0900 Subject: [PATCH 185/214] Support Django 2.0 (Manager.use_for_related_fields is removed.) --- caching/base.py | 3 ++- tests/testapp/models.py | 6 ++++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/caching/base.py b/caching/base.py index d361a53..1bae209 100644 --- a/caching/base.py +++ b/caching/base.py @@ -31,7 +31,8 @@ def __iter__(self): class CachingManager(models.Manager): - # Tell Django to use this manager when resolving foreign keys. + # This option removed in Django 2.0 + # Tell Django to use this manager when resolving foreign keys. (Django < 2.0) use_for_related_fields = True def get_queryset(self): diff --git a/tests/testapp/models.py b/tests/testapp/models.py index 880f9be..2f0357f 100644 --- a/tests/testapp/models.py +++ b/tests/testapp/models.py @@ -1,5 +1,6 @@ from __future__ import unicode_literals +import django from django.db import models from django.utils import six from caching.base import CachingMixin, CachingManager, cached_method @@ -19,6 +20,11 @@ class User(CachingMixin, models.Model): objects = CachingManager() + if django.VERSION[0] >= 2: + class Meta: + # Tell Django to use this manager when resolving foreign keys. 
(Django >= 2.0) + base_manager_name = 'objects' + class Addon(CachingMixin, models.Model): val = models.IntegerField() From cc2d71b91b5f72e6d32b19e48f37181ab9834f62 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=EA=B9=80=EC=A0=95=EA=B7=BC?= Date: Fri, 6 Apr 2018 17:27:06 +0900 Subject: [PATCH 186/214] Update README.rst --- README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.rst b/README.rst index 1475cce..b921992 100644 --- a/README.rst +++ b/README.rst @@ -17,7 +17,7 @@ For full docs, see https://cache-machine.readthedocs.org/en/latest/. Requirements ------------ -Cache Machine works with Django 1.8-1.11 and Python 2.7, 3.4, 3.5 and 3.6. +Cache Machine works with Django 1.8-2.0 and Python 2.7, 3.4, 3.5 and 3.6. Installation From da281a94911868f9d6a4ede79151919ef3b7fc7a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=EA=B9=80=EC=A0=95=EA=B7=BC?= Date: Fri, 6 Apr 2018 17:33:12 +0900 Subject: [PATCH 187/214] Update docs for support django 2.0 --- docs/index.rst | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/docs/index.rst b/docs/index.rst index 960c210..5a7be50 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -76,6 +76,10 @@ Here's what a minimal cached model looks like:: objects = CachingManager() + # if you support django 2.0 or more, must set base_manager_name + class Meta: + base_manager_name = 'objects' # Field name that assign `Cache Manager` + Whenever you run a query, ``CachingQuerySet`` will try to find that query in the cache. Queries are keyed by ``{prefix}:{sql}``. If it's there, we return the cached result set and everyone is happy. If the query isn't in the cache, From 62115c0c9e3aeee77ee1d4f0c33b194fbc18b18f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=EA=B9=80=EC=A0=95=EA=B7=BC?= Date: Fri, 6 Apr 2018 17:35:41 +0900 Subject: [PATCH 188/214] Revert "Remove specific python version" This reverts commit d5721f72921013fe655d81fe9f7dd8770b78cadf. 
--- .travis.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.travis.yml b/.travis.yml index 5b2c614..b205877 100644 --- a/.travis.yml +++ b/.travis.yml @@ -2,6 +2,9 @@ language: python services: - memcached - redis-server +python: +# python selected by tox, so specify only one version here + - "3.6" addons: postgresql: "9.5" before_install: From 00ba97bbf2098709f1e491913b56511647697cb6 Mon Sep 17 00:00:00 2001 From: Antonin Delpeuch Date: Sun, 17 Feb 2019 17:04:50 +0000 Subject: [PATCH 189/214] Remove outdated workaround in .travis.yml --- .travis.yml | 3 --- 1 file changed, 3 deletions(-) diff --git a/.travis.yml b/.travis.yml index b205877..e5e65d8 100644 --- a/.travis.yml +++ b/.travis.yml @@ -7,9 +7,6 @@ python: - "3.6" addons: postgresql: "9.5" -before_install: - # work around https://github.com/travis-ci/travis-ci/issues/8363 - - pyenv global system 3.5 before_script: - psql -c 'create database travis_ci_test;' -U postgres - psql -c 'create database travis_ci_test2;' -U postgres From 4d0f4f9912a00810977fcd89a50aa4aeec15a919 Mon Sep 17 00:00:00 2001 From: Antonin Delpeuch Date: Sun, 17 Feb 2019 17:15:19 +0000 Subject: [PATCH 190/214] Reorganize Travis build grid by Python version --- .travis.yml | 23 ++++++++++++----------- 1 file changed, 12 insertions(+), 11 deletions(-) diff --git a/.travis.yml b/.travis.yml index e5e65d8..c14830b 100644 --- a/.travis.yml +++ b/.travis.yml @@ -2,9 +2,6 @@ language: python services: - memcached - redis-server -python: -# python selected by tox, so specify only one version here - - "3.6" addons: postgresql: "9.5" before_script: @@ -19,13 +16,17 @@ script: - tox -e $TOX_ENV after_success: - coveralls -env: - - TOX_ENV="dj18-py27,dj18-py34,dj18-py35" - - TOX_ENV="dj19-py27,dj19-py34,dj19-py35,dj19-py36" - - TOX_ENV="dj110-py27,dj110-py34,dj110-py35,dj110-py36" - - TOX_ENV="dj111-py27,dj111-py34,dj111-py35,dj111-py36" - - TOX_ENV="dj200-py34,dj200-py35,dj200-py36" - - TOX_ENV="py27-flake8,py36-flake8" - - TOX_ENV="docs" 
+ +matrix: + include: + - python: 2.7 + env: TOXENV="dj18-py27,dj19-py27,dj110-py27,dj111-py27,py27-flake8" + - python: 3.4 + env: TOXENV="dj18-py34,dj19-py34,dj110-py34,dj111-py34,dj200-py34" + - python: 3.5 + env: TOXENV="dj18-py35,dj19-py35,dj110-py35,dj111-py35,dj200-py35" + - python: 3.6 + env: TOXENV="dj18-py36,dj19-py36,dj110-py36,dj111-py36,dj200-py36,py36-flake8,docs" + # Adding sudo: False tells Travis to use their container-based infrastructure, which is somewhat faster. sudo: False From 6982a9a1c2273a076f168f1968b62544ab828348 Mon Sep 17 00:00:00 2001 From: Antonin Delpeuch Date: Sun, 17 Feb 2019 17:19:24 +0000 Subject: [PATCH 191/214] Fix TOX_ENV configuration in grid --- .travis.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.travis.yml b/.travis.yml index c14830b..43c337d 100644 --- a/.travis.yml +++ b/.travis.yml @@ -20,13 +20,13 @@ after_success: matrix: include: - python: 2.7 - env: TOXENV="dj18-py27,dj19-py27,dj110-py27,dj111-py27,py27-flake8" + env: TOX_ENV="dj18-py27,dj19-py27,dj110-py27,dj111-py27,py27-flake8" - python: 3.4 - env: TOXENV="dj18-py34,dj19-py34,dj110-py34,dj111-py34,dj200-py34" + env: TOX_ENV="dj18-py34,dj19-py34,dj110-py34,dj111-py34,dj200-py34" - python: 3.5 - env: TOXENV="dj18-py35,dj19-py35,dj110-py35,dj111-py35,dj200-py35" + env: TOX_ENV="dj18-py35,dj19-py35,dj110-py35,dj111-py35,dj200-py35" - python: 3.6 - env: TOXENV="dj18-py36,dj19-py36,dj110-py36,dj111-py36,dj200-py36,py36-flake8,docs" + env: TOX_ENV="dj18-py36,dj19-py36,dj110-py36,dj111-py36,dj200-py36,py36-flake8,docs" # Adding sudo: False tells Travis to use their container-based infrastructure, which is somewhat faster. 
sudo: False From ac992db55b13ce612e66f41a0548bc8208421817 Mon Sep 17 00:00:00 2001 From: Antonin Delpeuch Date: Sun, 17 Feb 2019 17:31:17 +0000 Subject: [PATCH 192/214] Fix flake8 errors --- examples/cache_machine/custom_backend.py | 3 ++- examples/cache_machine/django_redis_settings.py | 3 ++- examples/cache_machine/locmem_settings.py | 3 ++- examples/cache_machine/memcache_byid.py | 3 ++- examples/cache_machine/redis_byid.py | 3 ++- examples/cache_machine/redis_settings.py | 3 ++- 6 files changed, 12 insertions(+), 6 deletions(-) diff --git a/examples/cache_machine/custom_backend.py b/examples/cache_machine/custom_backend.py index c9030d0..cee9415 100644 --- a/examples/cache_machine/custom_backend.py +++ b/examples/cache_machine/custom_backend.py @@ -1,4 +1,5 @@ -from .settings import * # flake8: noqa +# flake8: noqa +from .settings import * CACHES = { 'default': { diff --git a/examples/cache_machine/django_redis_settings.py b/examples/cache_machine/django_redis_settings.py index 257c837..d2130f2 100644 --- a/examples/cache_machine/django_redis_settings.py +++ b/examples/cache_machine/django_redis_settings.py @@ -1,4 +1,5 @@ -from .redis_settings import * # flake8: noqa +# flake8: noqa +from .redis_settings import * CACHES = { 'default': { diff --git a/examples/cache_machine/locmem_settings.py b/examples/cache_machine/locmem_settings.py index 8ed4fba..51af54e 100644 --- a/examples/cache_machine/locmem_settings.py +++ b/examples/cache_machine/locmem_settings.py @@ -1,4 +1,5 @@ -from .settings import * # flake8: noqa +# flake8: noqa +from .settings import * CACHES = { 'default': { diff --git a/examples/cache_machine/memcache_byid.py b/examples/cache_machine/memcache_byid.py index 4098de6..edb30f0 100644 --- a/examples/cache_machine/memcache_byid.py +++ b/examples/cache_machine/memcache_byid.py @@ -1,3 +1,4 @@ -from .settings import * # flake8: noqa +# flake8: noqa +from .settings import * FETCH_BY_ID = True diff --git a/examples/cache_machine/redis_byid.py 
b/examples/cache_machine/redis_byid.py index 61eb247..fa2369d 100644 --- a/examples/cache_machine/redis_byid.py +++ b/examples/cache_machine/redis_byid.py @@ -1,3 +1,4 @@ -from .redis_settings import * # flake8: noqa +# flake8: noqa +from .redis_settings import * FETCH_BY_ID = True diff --git a/examples/cache_machine/redis_settings.py b/examples/cache_machine/redis_settings.py index 1214942..ae9fc07 100644 --- a/examples/cache_machine/redis_settings.py +++ b/examples/cache_machine/redis_settings.py @@ -1,4 +1,5 @@ -from .settings import * # flake8: noqa +# flake8: noqa +from .settings import * CACHE_MACHINE_USE_REDIS = True REDIS_BACKEND = 'redis://' From 8f001d37c235d3dc09e62db5f6e2879ff9698923 Mon Sep 17 00:00:00 2001 From: Antonin Delpeuch Date: Sun, 17 Feb 2019 17:43:15 +0000 Subject: [PATCH 193/214] Add support for Django 2.1 --- .travis.yml | 4 ++-- tox.ini | 4 +++- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/.travis.yml b/.travis.yml index 43c337d..2dbf8d5 100644 --- a/.travis.yml +++ b/.travis.yml @@ -24,9 +24,9 @@ matrix: - python: 3.4 env: TOX_ENV="dj18-py34,dj19-py34,dj110-py34,dj111-py34,dj200-py34" - python: 3.5 - env: TOX_ENV="dj18-py35,dj19-py35,dj110-py35,dj111-py35,dj200-py35" + env: TOX_ENV="dj18-py35,dj19-py35,dj110-py35,dj111-py35,dj200-py35,dj210-py35" - python: 3.6 - env: TOX_ENV="dj18-py36,dj19-py36,dj110-py36,dj111-py36,dj200-py36,py36-flake8,docs" + env: TOX_ENV="dj18-py36,dj19-py36,dj110-py36,dj111-py36,dj200-py36,dj210-py36,py36-flake8,docs" # Adding sudo: False tells Travis to use their container-based infrastructure, which is somewhat faster. 
sudo: False diff --git a/tox.ini b/tox.ini index 0f2ee2a..9b704b6 100644 --- a/tox.ini +++ b/tox.ini @@ -8,6 +8,7 @@ envlist = dj{18}-py{27,34,35} dj{19,110,111}-py{27,34,35,36} dj{200}-py{34,35,36} + dj{210}-py{35,36} py{27,36}-flake8 docs @@ -25,7 +26,8 @@ deps = dj19: Django>=1.9,<1.10 dj110: Django>=1.10,<1.11 dj111: Django>=1.11,<2.0 - dj200: Django>=2.0 + dj200: Django>=2.0,<2.1 + dj210: Django>=2.1,<2.2 [testenv:docs] basepython = python3.6 From a0f178f1985bb4a32baeae9479bb67157aa741ff Mon Sep 17 00:00:00 2001 From: Antonin Delpeuch Date: Sun, 17 Feb 2019 17:51:52 +0000 Subject: [PATCH 194/214] Update README.rst to mention compatibility with Django 2.1 --- README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.rst b/README.rst index b921992..254d1b0 100644 --- a/README.rst +++ b/README.rst @@ -17,7 +17,7 @@ For full docs, see https://cache-machine.readthedocs.org/en/latest/. Requirements ------------ -Cache Machine works with Django 1.8-2.0 and Python 2.7, 3.4, 3.5 and 3.6. +Cache Machine works with Django 1.8-2.1 and Python 2.7, 3.4, 3.5 and 3.6. 
Installation From 83b0d0a7acaf4a093d86a665013494b438801375 Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Sun, 17 Feb 2019 21:18:50 -0500 Subject: [PATCH 195/214] fix StopIteration error on Python 3.7 --- caching/base.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/caching/base.py b/caching/base.py index 1bae209..64c43d5 100644 --- a/caching/base.py +++ b/caching/base.py @@ -323,7 +323,10 @@ def __iter__(self): if self.timeout == config.NO_CACHE: iterator = iterator() while True: - yield next(iterator) + try: + yield next(iterator) + except StopIteration: + return else: for obj in CachingModelIterable(self, iter_function=iterator, timeout=self.timeout): yield obj From 152f59823c7fffa21ecefc9925cecabdd06ae016 Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Sun, 17 Feb 2019 21:22:47 -0500 Subject: [PATCH 196/214] add support for new Python & Django versions, and drop support for older versions --- .travis.yml | 10 ++++++---- tox.ini | 23 +++++++++++------------ 2 files changed, 17 insertions(+), 16 deletions(-) diff --git a/.travis.yml b/.travis.yml index 2dbf8d5..f3b3da2 100644 --- a/.travis.yml +++ b/.travis.yml @@ -20,13 +20,15 @@ after_success: matrix: include: - python: 2.7 - env: TOX_ENV="dj18-py27,dj19-py27,dj110-py27,dj111-py27,py27-flake8" + env: TOX_ENV="dj111-py27,py27-flake8" - python: 3.4 - env: TOX_ENV="dj18-py34,dj19-py34,dj110-py34,dj111-py34,dj200-py34" + env: TOX_ENV="dj111-py34,dj200-py34" - python: 3.5 - env: TOX_ENV="dj18-py35,dj19-py35,dj110-py35,dj111-py35,dj200-py35,dj210-py35" + env: TOX_ENV="dj111-py35,dj200-py35,dj210-py35,dj220-py35" - python: 3.6 - env: TOX_ENV="dj18-py36,dj19-py36,dj110-py36,dj111-py36,dj200-py36,dj210-py36,py36-flake8,docs" + env: TOX_ENV="dj111-py36,dj200-py36,dj210-py36,dj220-py36" + - python: 3.7 + env: TOX_ENV="dj111-py37,dj200-py37,dj210-py37,dj220-py37,py37-flake8,docs" # Adding sudo: False tells Travis to use their container-based infrastructure, which is somewhat faster. 
sudo: False diff --git a/tox.ini b/tox.ini index 9b704b6..6ce7c00 100644 --- a/tox.ini +++ b/tox.ini @@ -5,11 +5,11 @@ [tox] envlist = - dj{18}-py{27,34,35} - dj{19,110,111}-py{27,34,35,36} - dj{200}-py{34,35,36} - dj{210}-py{35,36} - py{27,36}-flake8 + dj{111}-py{27,34,35,36,37} + dj{200}-py{34,35,36,37} + dj{210}-py{35,36,37} + dj{220}-py{35,36,37} + py{27,37}-flake8 docs [testenv] @@ -18,19 +18,18 @@ basepython = py34: python3.4 py35: python3.5 py36: python3.6 + py37: python3.7 commands = {envpython} run_tests.py --with-coverage deps = - py{26,27}: -rrequirements/py2.txt - py{34,35,36}: -rrequirements/py3.txt - dj18: Django>=1.8,<1.9 - dj19: Django>=1.9,<1.10 - dj110: Django>=1.10,<1.11 + py27: -rrequirements/py2.txt + py{34,35,36,37}: -rrequirements/py3.txt dj111: Django>=1.11,<2.0 dj200: Django>=2.0,<2.1 dj210: Django>=2.1,<2.2 + dj220: Django==2.2b1 [testenv:docs] -basepython = python3.6 +basepython = python3.7 deps = Sphinx Django @@ -44,6 +43,6 @@ commands = /usr/bin/make html deps = flake8 commands = flake8 -[testenv:py36-flake8] +[testenv:py37-flake8] deps = flake8 commands = flake8 From 8b9e3458238fa5dce922fe8412c80abaa53528a4 Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Sun, 17 Feb 2019 21:23:51 -0500 Subject: [PATCH 197/214] bump version & add release notes for 1.1.0 --- caching/__init__.py | 2 +- docs/releases.rst | 8 ++++++++ 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/caching/__init__.py b/caching/__init__.py index 3d38e44..3ff4c19 100644 --- a/caching/__init__.py +++ b/caching/__init__.py @@ -1,4 +1,4 @@ from __future__ import unicode_literals -VERSION = ('1', '0', '0') +VERSION = ('1', '1', '0') __version__ = '.'.join(VERSION) diff --git a/docs/releases.rst b/docs/releases.rst index 2094e79..ac35bff 100644 --- a/docs/releases.rst +++ b/docs/releases.rst @@ -3,6 +3,14 @@ Release Notes ================== +v1.1.0 (2019-02-17) +------------------- + +- Drop official support for unsupported Django versions (1.8, 1.9, and 1.10) 
+- Add support for Django 2.0, 2.1, and 2.2 (thanks, @JungleKim and @wetneb!) +- Add support for Python 3.7 +- Fix Travis + v1.0.0 (2017-10-13) ------------------- From 5d958cf5f1d859590433f62054da0aaa346e0f0b Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Sun, 17 Feb 2019 21:24:50 -0500 Subject: [PATCH 198/214] update version references in README --- README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.rst b/README.rst index 254d1b0..6e1a0c1 100644 --- a/README.rst +++ b/README.rst @@ -17,7 +17,7 @@ For full docs, see https://cache-machine.readthedocs.org/en/latest/. Requirements ------------ -Cache Machine works with Django 1.8-2.1 and Python 2.7, 3.4, 3.5 and 3.6. +Cache Machine works with Django 1.11-2.2 and Python 2.7, 3.4, 3.5, 3.6, and 3.7. Installation From 46854106439c0b8f627ce352c8e9e4d7381c501b Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Sun, 17 Feb 2019 21:25:29 -0500 Subject: [PATCH 199/214] add 3.7 to setup.py --- setup.py | 1 + 1 file changed, 1 insertion(+) diff --git a/setup.py b/setup.py index 519a5b7..d9e7ca5 100644 --- a/setup.py +++ b/setup.py @@ -31,6 +31,7 @@ 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', + 'Programming Language :: Python :: 3.7', 'Topic :: Software Development :: Libraries :: Python Modules', ] ) From 3a93d352b7c17a297bc50253c1a5af365506806c Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Sun, 17 Feb 2019 21:30:01 -0500 Subject: [PATCH 200/214] doc language updates --- docs/index.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/index.rst b/docs/index.rst index 5a7be50..5e221fb 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -76,9 +76,9 @@ Here's what a minimal cached model looks like:: objects = CachingManager() - # if you support django 2.0 or more, must set base_manager_name + # if you use Django 2.0 or later, you must set base_manager_name class Meta: - 
base_manager_name = 'objects' # Field name that assign `Cache Manager` + base_manager_name = 'objects' # Attribute name of CachingManager(), above Whenever you run a query, ``CachingQuerySet`` will try to find that query in the cache. Queries are keyed by ``{prefix}:{sql}``. If it's there, we return From cddda2dcbb785c9c5e46c1afae7084fbea7957f7 Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Sun, 17 Feb 2019 21:33:28 -0500 Subject: [PATCH 201/214] switch Travis to xenial in hopes of getting Python 3.7 support --- .travis.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.travis.yml b/.travis.yml index f3b3da2..bf7496a 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,3 +1,4 @@ +dist: xenial language: python services: - memcached From 6d3e41034162e5d6877c5e078d55d5dcf57ed115 Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Sun, 17 Feb 2019 21:36:27 -0500 Subject: [PATCH 202/214] switch travis to postgres 10 --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index bf7496a..76ded13 100644 --- a/.travis.yml +++ b/.travis.yml @@ -4,7 +4,7 @@ services: - memcached - redis-server addons: - postgresql: "9.5" + postgresql: "10" before_script: - psql -c 'create database travis_ci_test;' -U postgres - psql -c 'create database travis_ci_test2;' -U postgres From 6bf54be3600a0191c6890fe07d864da8c3824b33 Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Sun, 17 Feb 2019 21:41:54 -0500 Subject: [PATCH 203/214] organize imports --- caching/base.py | 4 ++-- caching/ext.py | 1 - caching/invalidation.py | 2 +- run_tests.py | 2 +- setup.py | 1 - 5 files changed, 4 insertions(+), 6 deletions(-) diff --git a/caching/base.py b/caching/base.py index 64c43d5..4e7ff59 100644 --- a/caching/base.py +++ b/caching/base.py @@ -7,11 +7,11 @@ from django.core.cache.backends.base import DEFAULT_TIMEOUT from django.db import models from django.db.models import signals -from django.db.models.sql import query, EmptyResultSet +from 
django.db.models.sql import EmptyResultSet, query from django.utils import encoding from caching import config -from caching.invalidation import invalidator, flush_key, make_key, byid, cache +from caching.invalidation import byid, cache, flush_key, invalidator, make_key try: # ModelIterable is defined in Django 1.9+, and if it's present, we use it diff --git a/caching/ext.py b/caching/ext.py index c5b5e71..a7d726a 100644 --- a/caching/ext.py +++ b/caching/ext.py @@ -2,7 +2,6 @@ from django.conf import settings from django.utils import encoding - from jinja2 import nodes from jinja2.ext import Extension diff --git a/caching/invalidation.py b/caching/invalidation.py index 360c80e..2d337d1 100644 --- a/caching/invalidation.py +++ b/caching/invalidation.py @@ -10,7 +10,7 @@ from django.core.cache import cache as default_cache from django.core.cache import caches from django.core.cache.backends.base import InvalidCacheBackendError -from django.utils import encoding, translation, six +from django.utils import encoding, six, translation from django.utils.six.moves.urllib.parse import parse_qsl from caching import config diff --git a/run_tests.py b/run_tests.py index 1dd72ff..9320a79 100644 --- a/run_tests.py +++ b/run_tests.py @@ -4,9 +4,9 @@ DJANGO_SETTINGS_MODULE, so I run these commands which get the right env automatically. 
""" +import argparse import os import sys -import argparse from subprocess import call, check_output NAME = os.path.basename(os.path.dirname(__file__)) diff --git a/setup.py b/setup.py index d9e7ca5..b981ad2 100644 --- a/setup.py +++ b/setup.py @@ -2,7 +2,6 @@ import caching - setup( name='django-cache-machine', version=caching.__version__, From b8a42fc5c7f922cf5bc82e2a6f939be9e2bb5aea Mon Sep 17 00:00:00 2001 From: Tim Gates Date: Wed, 20 Nov 2019 08:37:20 +1100 Subject: [PATCH 204/214] Fix simple typo: tranfer -> transfer --- docs/index.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/index.rst b/docs/index.rst index 5e221fb..4f85db8 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -188,7 +188,7 @@ Redis Support Cache Machine support storing flush lists in Redis rather than memcached, which is more efficient because Redis can manipulate the lists on the server side -rather than having to tranfer the entire list back and forth for each +rather than having to transfer the entire list back and forth for each modification. To enable Redis support for Cache Machine, add the following to your settings From fae8fd9526f261b76827c32221828919b3ae6992 Mon Sep 17 00:00:00 2001 From: Aristotel Fani Date: Wed, 6 Jul 2022 14:26:48 -0400 Subject: [PATCH 205/214] Update to support Django 3.2 (#143) --- .gitignore | 1 + README.rst | 2 +- caching/base.py | 30 ++++++++---------------------- caching/ext.py | 2 -- caching/invalidation.py | 10 ++++------ run_tests.py | 2 +- tests/testapp/models.py | 9 +-------- 7 files changed, 16 insertions(+), 40 deletions(-) diff --git a/.gitignore b/.gitignore index b346e5f..195f10a 100644 --- a/.gitignore +++ b/.gitignore @@ -4,3 +4,4 @@ docs/_build *.py[co] *.egg-info *~ +.idea \ No newline at end of file diff --git a/README.rst b/README.rst index 6e1a0c1..20eb08b 100644 --- a/README.rst +++ b/README.rst @@ -17,7 +17,7 @@ For full docs, see https://cache-machine.readthedocs.org/en/latest/. 
Requirements ------------ -Cache Machine works with Django 1.11-2.2 and Python 2.7, 3.4, 3.5, 3.6, and 3.7. +Cache Machine works with Django 1.11-2.2, 3.0-3.2 and Python 2.7, 3.4, 3.5, 3.6, 3.7, 3.8 and 3.9. Installation diff --git a/caching/base.py b/caching/base.py index 4e7ff59..ef36e3e 100644 --- a/caching/base.py +++ b/caching/base.py @@ -1,30 +1,16 @@ -from __future__ import unicode_literals - import functools import logging import django from django.core.cache.backends.base import DEFAULT_TIMEOUT +from django.core.exceptions import EmptyResultSet from django.db import models from django.db.models import signals -from django.db.models.sql import EmptyResultSet, query from django.utils import encoding from caching import config from caching.invalidation import byid, cache, flush_key, invalidator, make_key - -try: - # ModelIterable is defined in Django 1.9+, and if it's present, we use it - # iterate over our results. - from django.db.models.query import ModelIterable -except ImportError: - # If not, define a Django 1.8-compatible stub we can use instead. - class ModelIterable(object): - def __init__(self, queryset): - self.queryset = queryset - - def __iter__(self): - return super(CachingQuerySet, self.queryset).iterator() +from django.db.models.query import ModelIterable log = logging.getLogger('caching') @@ -118,7 +104,7 @@ def __iter__(self): # Try to fetch from the cache. 
try: query_key = self.query_key() - except query.EmptyResultSet: + except EmptyResultSet: return cached = cache.get(query_key) @@ -230,7 +216,7 @@ def count(self): super_count = super(CachingQuerySet, self).count try: query_string = 'count:%s' % self.query_key() - except query.EmptyResultSet: + except EmptyResultSet: return 0 if self.timeout == config.NO_CACHE or config.TIMEOUT == config.NO_CACHE: return super_count() @@ -285,7 +271,7 @@ def _cache_key(cls, pk, db=None): key_parts = ('o', cls._meta, pk, db) else: key_parts = ('o', cls._meta, pk) - return ':'.join(map(encoding.smart_text, key_parts)) + return ':'.join(map(encoding.smart_str, key_parts)) def _cache_keys(self, incl_db=True): """Return the cache key for self plus all related foreign keys.""" @@ -359,10 +345,10 @@ def cached_with(obj, f, f_key, timeout=DEFAULT_TIMEOUT): obj_key = (obj.query_key() if hasattr(obj, 'query_key') else obj.cache_key) except (AttributeError, EmptyResultSet): - log.warning('%r cannot be cached.' % encoding.smart_text(obj)) + log.warning('%r cannot be cached.' % encoding.smart_str(obj)) return f() - key = '%s:%s' % tuple(map(encoding.smart_text, (f_key, obj_key))) + key = '%s:%s' % tuple(map(encoding.smart_str, (f_key, obj_key))) # Put the key generated in cached() into this object's flush list. 
invalidator.add_to_flush_list( {obj.flush_key(): [_function_cache_key(key)]}) @@ -413,7 +399,7 @@ def k(o): return o.cache_key if hasattr(o, 'cache_key') else o kwarg_keys = [(key, k(val)) for key, val in list(kwargs.items())] key_parts = ('m', self.obj.cache_key, self.func.__name__, arg_keys, kwarg_keys) - key = ':'.join(map(encoding.smart_text, key_parts)) + key = ':'.join(map(encoding.smart_str, key_parts)) if key not in self.cache: f = functools.partial(self.func, self.obj, *args, **kwargs) self.cache[key] = cached_with(self.obj, f, key) diff --git a/caching/ext.py b/caching/ext.py index a7d726a..798c9d8 100644 --- a/caching/ext.py +++ b/caching/ext.py @@ -1,5 +1,3 @@ -from __future__ import unicode_literals - from django.conf import settings from django.utils import encoding from jinja2 import nodes diff --git a/caching/invalidation.py b/caching/invalidation.py index 2d337d1..a2b95d7 100644 --- a/caching/invalidation.py +++ b/caching/invalidation.py @@ -1,5 +1,3 @@ -from __future__ import unicode_literals - import collections import functools import hashlib @@ -10,8 +8,8 @@ from django.core.cache import cache as default_cache from django.core.cache import caches from django.core.cache.backends.base import InvalidCacheBackendError -from django.utils import encoding, six, translation -from django.utils.six.moves.urllib.parse import parse_qsl +from django.utils import encoding, translation +from urllib.parse import parse_qsl from caching import config @@ -41,12 +39,12 @@ def make_key(k, with_locale=True): def flush_key(obj): """We put flush lists in the flush: namespace.""" - key = obj if isinstance(obj, six.string_types) else obj.get_cache_key(incl_db=False) + key = obj if isinstance(obj, str) else obj.get_cache_key(incl_db=False) return config.FLUSH + make_key(key, with_locale=False) def byid(obj): - key = obj if isinstance(obj, six.string_types) else obj.cache_key + key = obj if isinstance(obj, str) else obj.cache_key return make_key('byid:' + key) diff --git 
a/run_tests.py b/run_tests.py index 9320a79..5d9d1cf 100644 --- a/run_tests.py +++ b/run_tests.py @@ -37,7 +37,7 @@ def main(): args = parser.parse_args() settings = args.settings and [args.settings] or SETTINGS results = [] - django_admin = check_output(['which', 'django-admin.py']).strip() + django_admin = check_output(['which', 'django-admin']).strip() for i, settings_module in enumerate(settings): print('Running tests for: %s' % settings_module) os.environ['DJANGO_SETTINGS_MODULE'] = 'cache_machine.%s' % settings_module diff --git a/tests/testapp/models.py b/tests/testapp/models.py index 2f0357f..60e2b27 100644 --- a/tests/testapp/models.py +++ b/tests/testapp/models.py @@ -1,14 +1,7 @@ -from __future__ import unicode_literals - import django from django.db import models -from django.utils import six from caching.base import CachingMixin, CachingManager, cached_method - -if six.PY3: - from unittest import mock -else: - import mock +from unittest import mock # This global call counter will be shared among all instances of an Addon. From 1dd6236e2f87925fe0fc659b3c144933a46d9fee Mon Sep 17 00:00:00 2001 From: Tim Gates Date: Thu, 7 Jul 2022 04:27:14 +1000 Subject: [PATCH 206/214] docs: fix simple typo, condtions -> conditions (#144) --- docs/index.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/index.rst b/docs/index.rst index 4f85db8..a38353a 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -105,7 +105,7 @@ cleared. To avoid stale foreign key relations, any cached objects will be flushed when the object their foreign key points to is invalidated. 
During cache invalidation, we explicitly set a None value instead of just -deleting so we don't have any race condtions where: +deleting so we don't have any race conditions where: * Thread 1 -> Cache miss, get object from DB * Thread 2 -> Object saved, deleted from cache From 0ba12bccd2f1296fa6cb96800c6e86d42b3c37cd Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Wed, 6 Jul 2022 15:37:39 +0000 Subject: [PATCH 207/214] prepare for 1.2.0 release - Add support for Python 3.10 and Django 4.0 - Drop support for old Python and Django versions - Switch to GitHub actions - Update release notes --- .github/workflows/ci.yaml | 86 +++++++++++++++++++ .gitignore | 4 +- .travis.yml | 35 -------- README.rst | 8 +- caching/base.py | 6 +- requirements/base.txt => dev-requirements.txt | 6 +- docs/releases.rst | 12 ++- examples/cache_machine/settings.py | 31 +++---- requirements/py2.txt | 2 - requirements/py3.txt | 1 - setup.cfg | 4 + tests/test_cache.py | 63 +++++++------- tox.ini | 41 ++++----- 13 files changed, 177 insertions(+), 122 deletions(-) create mode 100644 .github/workflows/ci.yaml delete mode 100644 .travis.yml rename requirements/base.txt => dev-requirements.txt (70%) delete mode 100644 requirements/py2.txt delete mode 100644 requirements/py3.txt diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml new file mode 100644 index 0000000..340d092 --- /dev/null +++ b/.github/workflows/ci.yaml @@ -0,0 +1,86 @@ +name: lint-test + +on: + pull_request: + branches: + - main + push: + branches: + - main + schedule: + # run once a week on early monday mornings + - cron: '22 2 * * 1' + +jobs: + pre-commit: + runs-on: ubuntu-20.04 + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-python@v2 + - uses: pre-commit/action@v2.0.0 + + test-job: + runs-on: ubuntu-20.04 + strategy: + matrix: + # tox-gh-actions will only run the tox environments which match the currently + # running python-version. 
See [gh-actions] in tox.ini for the mapping + python-version: [3.6, 3.7, 3.8, 3.9, '3.10'] + # Service containers to run with `test-job` + services: + memcached: + image: memcached + ports: + - 11211:11211 + redis: + image: redis + options: >- + --health-cmd "redis-cli ping" + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + - 6379:6379 + postgres-default: + image: postgres + env: + POSTGRES_USER: default + POSTGRES_PASSWORD: postgres + options: >- + --health-cmd pg_isready + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + # use port 5432 for default DB + - 5432:5432 + postgres-primary2: + image: postgres + env: + POSTGRES_USER: primary2 + POSTGRES_PASSWORD: postgres + options: >- + --health-cmd pg_isready + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + # use port 5433 for primary2 DB + - 5433:5432 + steps: + - uses: actions/checkout@v2 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + cache: "pip" + cache-dependency-path: "**/dev-requirements.txt" + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install -r dev-requirements.txt + - name: Test with tox + env: + DATABASE_URL: postgres://default:postgres@localhost:5432/default + DATABASE_URL_2: postgres://primary2:postgres@localhost:5433/primary2 + run: tox diff --git a/.gitignore b/.gitignore index 195f10a..a1cbc95 100644 --- a/.gitignore +++ b/.gitignore @@ -4,4 +4,6 @@ docs/_build *.py[co] *.egg-info *~ -.idea \ No newline at end of file +.idea +.direnv +.envrc diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 76ded13..0000000 --- a/.travis.yml +++ /dev/null @@ -1,35 +0,0 @@ -dist: xenial -language: python -services: - - memcached - - redis-server -addons: - postgresql: "10" -before_script: - - psql -c 'create database travis_ci_test;' -U postgres - - psql -c 'create database 
travis_ci_test2;' -U postgres -install: - - pip install -U pip # make sure we have the latest version - - pip install -e . - - pip install tox - - pip install coveralls -script: - - tox -e $TOX_ENV -after_success: - - coveralls - -matrix: - include: - - python: 2.7 - env: TOX_ENV="dj111-py27,py27-flake8" - - python: 3.4 - env: TOX_ENV="dj111-py34,dj200-py34" - - python: 3.5 - env: TOX_ENV="dj111-py35,dj200-py35,dj210-py35,dj220-py35" - - python: 3.6 - env: TOX_ENV="dj111-py36,dj200-py36,dj210-py36,dj220-py36" - - python: 3.7 - env: TOX_ENV="dj111-py37,dj200-py37,dj210-py37,dj220-py37,py37-flake8,docs" - -# Adding sudo: False tells Travis to use their container-based infrastructure, which is somewhat faster. -sudo: False diff --git a/README.rst b/README.rst index 20eb08b..629b37e 100644 --- a/README.rst +++ b/README.rst @@ -17,8 +17,12 @@ For full docs, see https://cache-machine.readthedocs.org/en/latest/. Requirements ------------ -Cache Machine works with Django 1.11-2.2, 3.0-3.2 and Python 2.7, 3.4, 3.5, 3.6, 3.7, 3.8 and 3.9. +Cache Machine currently works with: +* Django 2.2, 3.0, 3.1, 3.2, and 4.0 +* Python 3.6, 3.7, 3.8, 3.9, and 3.10 + +The last version to support Python 2.7 and Django 1.11 is ``django-cache-machine==1.1.0``. Installation ------------ @@ -35,5 +39,5 @@ Get it from `github =1.58 +tox +tox-gh-actions diff --git a/docs/releases.rst b/docs/releases.rst index ac35bff..f5f8ad8 100644 --- a/docs/releases.rst +++ b/docs/releases.rst @@ -3,6 +3,14 @@ Release Notes ================== +v1.2.0 (2022-07-06) +------------------- + +- Drop official support for unsupported Django versions (1.11, 2.0, 2.1) +- Add support for Django 3.0, 3.1, 3.2, and 4.0 (thanks, @johanneswilm and @Afani97!) 
+- Add support for Python 3.8, 3.9, and 3.10 +- Switch to GitHub Actions + v1.1.0 (2019-02-17) ------------------- @@ -38,8 +46,8 @@ v0.9.1 (2015-10-22) - Fix bug that prevented caching objects forever when using Django <= 1.5 (see PR #104) - Fix regression (introduced in 0.8) that broke invalidation when an object - was cached via a slave database and later modified or deleted via the - master database, when using master/slave replication (see PR #105). Note + was cached via a replica database and later modified or deleted via the + primary database, when using primary/replica replication (see PR #105). Note this change may cause unexpected invalidation when sharding across DBs that share both a schema and primary key values or other attributes. diff --git a/examples/cache_machine/settings.py b/examples/cache_machine/settings.py index c909b94..01989dd 100644 --- a/examples/cache_machine/settings.py +++ b/examples/cache_machine/settings.py @@ -1,5 +1,6 @@ import os +import dj_database_url import django CACHES = { @@ -9,31 +10,19 @@ }, } -TEST_RUNNER = 'django_nose.runner.NoseTestSuiteRunner' - DATABASES = { - 'default': { - 'NAME': os.environ.get('TRAVIS') and 'travis_ci_test' or 'cache_machine_devel', - 'ENGINE': 'django.db.backends.postgresql_psycopg2', - }, - 'slave': { - 'NAME': 'cache_machine_devel', - 'ENGINE': 'django.db.backends.postgresql_psycopg2', - 'TEST_MIRROR': 'default', # support older Django syntax for now - }, - 'master2': { - 'NAME': os.environ.get('TRAVIS') and 'travis_ci_test2' or 'cache_machine_devel2', - 'ENGINE': 'django.db.backends.postgresql_psycopg2', - }, - 'slave2': { - 'NAME': 'cache_machine_devel2', - 'ENGINE': 'django.db.backends.postgresql_psycopg2', - 'TEST_MIRROR': 'master2', # support older Django syntax for now - }, + 'default': dj_database_url.config(default='postgres:///cache_machine_devel'), + 'primary2': dj_database_url.parse( + os.getenv('DATABASE_URL_2', 'postgres:///cache_machine_devel2') + ), } +for primary, replica in 
(('default', 'replica'), ('primary2', 'replica2')): + DATABASES[replica] = DATABASES[primary].copy() + DATABASES[replica]['TEST'] = {'MIRROR': primary} + +DEFAULT_AUTO_FIELD = 'django.db.models.AutoField' INSTALLED_APPS = ( - 'django_nose', 'tests.testapp', ) diff --git a/requirements/py2.txt b/requirements/py2.txt deleted file mode 100644 index 032a559..0000000 --- a/requirements/py2.txt +++ /dev/null @@ -1,2 +0,0 @@ --r base.txt -mock==1.0.1 diff --git a/requirements/py3.txt b/requirements/py3.txt deleted file mode 100644 index a3e81b8..0000000 --- a/requirements/py3.txt +++ /dev/null @@ -1 +0,0 @@ --r base.txt diff --git a/setup.cfg b/setup.cfg index 0e2e870..d089b6c 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,2 +1,6 @@ [flake8] max-line-length=100 +exclude= + .tox + .git + .direnv diff --git a/tests/test_cache.py b/tests/test_cache.py index 50fd773..6b2adbd 100644 --- a/tests/test_cache.py +++ b/tests/test_cache.py @@ -555,57 +555,60 @@ def test_pickle_queryset(self): # use TransactionTestCase so that ['TEST']['MIRROR'] setting works # see https://code.djangoproject.com/ticket/23718 class MultiDbTestCase(TransactionTestCase): - multi_db = True + databases = {'default', 'primary2', 'replica', 'replica2'} fixtures = ['tests/testapp/fixtures/testapp/test_cache.json'] extra_apps = ['tests.testapp'] def test_multidb_cache(self): - """ Test where master and slave DB result in two different cache keys """ + """ Test where primary and replica DB result in two different cache keys """ self.assertIs(Addon.objects.get(id=1).from_cache, False) self.assertIs(Addon.objects.get(id=1).from_cache, True) - from_slave = Addon.objects.using('slave').get(id=1) - self.assertIs(from_slave.from_cache, False) - self.assertEqual(from_slave._state.db, 'slave') + from_replica = Addon.objects.using('replica').get(id=1) + self.assertIs(from_replica.from_cache, False) + self.assertEqual(from_replica._state.db, 'replica') def test_multidb_fetch_by_id(self): - """ Test where master and slave 
DB result in two different cache keys with FETCH_BY_ID""" + """ Test where primary and replica DB result in two different cache keys with FETCH_BY_ID""" with self.settings(FETCH_BY_ID=True): self.assertIs(Addon.objects.get(id=1).from_cache, False) self.assertIs(Addon.objects.get(id=1).from_cache, True) - from_slave = Addon.objects.using('slave').get(id=1) - self.assertIs(from_slave.from_cache, False) - self.assertEqual(from_slave._state.db, 'slave') + from_replica = Addon.objects.using('replica').get(id=1) + self.assertIs(from_replica.from_cache, False) + self.assertEqual(from_replica._state.db, 'replica') - def test_multidb_master_slave_invalidation(self): + def test_multidb_primary_replica_invalidation(self): """ Test saving an object on one DB invalidates it for all DBs """ log.debug('priming the DB & cache') - master_obj = User.objects.using('default').create(name='new-test-user') - slave_obj = User.objects.using('slave').get(name='new-test-user') - self.assertIs(slave_obj.from_cache, False) + primary_obj = User.objects.using('default').create(name='new-test-user') + replica_obj = User.objects.using('replica').get(name='new-test-user') + self.assertIs(replica_obj.from_cache, False) log.debug('deleting the original object') - User.objects.using('default').filter(pk=slave_obj.pk).delete() + User.objects.using('default').filter(pk=replica_obj.pk).delete() log.debug('re-creating record with a new primary key') - master_obj = User.objects.using('default').create(name='new-test-user') + primary_obj = User.objects.using('default').create(name='new-test-user') log.debug('attempting to force re-fetch from DB (should not use cache)') - slave_obj = User.objects.using('slave').get(name='new-test-user') - self.assertIs(slave_obj.from_cache, False) - self.assertEqual(slave_obj.pk, master_obj.pk) + replica_obj = User.objects.using('replica').get(name='new-test-user') + self.assertIs(replica_obj.from_cache, False) + self.assertEqual(replica_obj.pk, primary_obj.pk) def 
test_multidb_no_db_crossover(self): """ Test no crossover of objects with identical PKs """ - master_obj = User.objects.using('default').create(name='new-test-user') - master_obj2 = User.objects.using('master2').create(pk=master_obj.pk, name='other-test-user') + primary_obj = User.objects.using('default').create(name='new-test-user') + primary_obj2 = User.objects.using('primary2').create( + pk=primary_obj.pk, + name='other-test-user', + ) # prime the cache for the default DB - master_obj = User.objects.using('default').get(name='new-test-user') - self.assertIs(master_obj.from_cache, False) - master_obj = User.objects.using('default').get(name='new-test-user') - self.assertIs(master_obj.from_cache, True) - # prime the cache for the 2nd master DB - master_obj2 = User.objects.using('master2').get(name='other-test-user') - self.assertIs(master_obj2.from_cache, False) - master_obj2 = User.objects.using('master2').get(name='other-test-user') - self.assertIs(master_obj2.from_cache, True) + primary_obj = User.objects.using('default').get(name='new-test-user') + self.assertIs(primary_obj.from_cache, False) + primary_obj = User.objects.using('default').get(name='new-test-user') + self.assertIs(primary_obj.from_cache, True) + # prime the cache for the 2nd primary DB + primary_obj2 = User.objects.using('primary2').get(name='other-test-user') + self.assertIs(primary_obj2.from_cache, False) + primary_obj2 = User.objects.using('primary2').get(name='other-test-user') + self.assertIs(primary_obj2.from_cache, True) # ensure no crossover between databases - self.assertNotEqual(master_obj.name, master_obj2.name) + self.assertNotEqual(primary_obj.name, primary_obj2.name) diff --git a/tox.ini b/tox.ini index 6ce7c00..1303662 100644 --- a/tox.ini +++ b/tox.ini @@ -4,29 +4,28 @@ # and then run "tox" from this directory. 
[tox] -envlist = - dj{111}-py{27,34,35,36,37} - dj{200}-py{34,35,36,37} - dj{210}-py{35,36,37} - dj{220}-py{35,36,37} - py{27,37}-flake8 - docs +envlist = py3{6,7,8,9}-{2.2,3.0,3.1,3.2},py310-3.2,py3{8,9,10}-{4.0} + +[gh-actions] +python = + 3.6: py36 + 3.7: py37 + 3.8: py38 + 3.9: py39 + 3.10: py310 [testenv] -basepython = - py27: python2.7 - py34: python3.4 - py35: python3.5 - py36: python3.6 - py37: python3.7 commands = {envpython} run_tests.py --with-coverage +passenv = + DATABASE_URL + DATABASE_URL_2 deps = - py27: -rrequirements/py2.txt - py{34,35,36,37}: -rrequirements/py3.txt - dj111: Django>=1.11,<2.0 - dj200: Django>=2.0,<2.1 - dj210: Django>=2.1,<2.2 - dj220: Django==2.2b1 + -rdev-requirements.txt + 2.2: Django>=2.2,<3.0 + 3.0: Django>=3.0,<3.1 + 3.1: Django>=3.1,<3.2 + 3.2: Django>=3.2,<4.0 + 4.0: Django>=4.0,<4.1 [testenv:docs] basepython = python3.7 @@ -39,10 +38,6 @@ setenv = changedir = docs commands = /usr/bin/make html -[testenv:py27-flake8] -deps = flake8 -commands = flake8 - [testenv:py37-flake8] deps = flake8 commands = flake8 From 835c858aefdeadfb179659c58159366098aae859 Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Wed, 6 Jul 2022 15:50:15 +0000 Subject: [PATCH 208/214] add pre-commit config --- .pre-commit-config.yaml | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) create mode 100644 .pre-commit-config.yaml diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..2c58389 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,17 @@ +# See https://pre-commit.com for more information +# See https://pre-commit.com/hooks.html for more hooks +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v2.4.0 + hooks: + - id: check-added-large-files + - id: check-merge-conflict + - id: check-yaml + - id: debug-statements + - id: detect-private-key + - id: end-of-file-fixer + - id: trailing-whitespace + - repo: https://github.com/PyCQA/flake8 + rev: 4.0.1 + hooks: + - id: flake8 From 
0526081ab9c8c661d46dfab375fbedb4f041addd Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Wed, 6 Jul 2022 15:51:06 +0000 Subject: [PATCH 209/214] add isort --- .pre-commit-config.yaml | 5 +++++ caching/base.py | 2 +- caching/invalidation.py | 2 +- tests/test_cache.py | 8 ++++---- tests/testapp/models.py | 5 +++-- 5 files changed, 14 insertions(+), 8 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 2c58389..124fa1b 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -15,3 +15,8 @@ repos: rev: 4.0.1 hooks: - id: flake8 + - repo: https://github.com/pycqa/isort + rev: 5.6.4 + hooks: + - id: isort + args: ["--profile", "black", "--filter-files"] diff --git a/caching/base.py b/caching/base.py index 8260345..29cf51b 100644 --- a/caching/base.py +++ b/caching/base.py @@ -6,11 +6,11 @@ from django.core.exceptions import EmptyResultSet from django.db import models from django.db.models import signals +from django.db.models.query import ModelIterable from django.utils import encoding from caching import config from caching.invalidation import byid, cache, flush_key, invalidator, make_key -from django.db.models.query import ModelIterable log = logging.getLogger('caching') diff --git a/caching/invalidation.py b/caching/invalidation.py index a2b95d7..2fb3164 100644 --- a/caching/invalidation.py +++ b/caching/invalidation.py @@ -3,13 +3,13 @@ import hashlib import logging import socket +from urllib.parse import parse_qsl from django.conf import settings from django.core.cache import cache as default_cache from django.core.cache import caches from django.core.cache.backends.base import InvalidCacheBackendError from django.utils import encoding, translation -from urllib.parse import parse_qsl from caching import config diff --git a/tests/test_cache.py b/tests/test_cache.py index 6b2adbd..53beb18 100644 --- a/tests/test_cache.py +++ b/tests/test_cache.py @@ -1,20 +1,20 @@ from __future__ import unicode_literals -import jinja2 + 
import logging import pickle import sys import unittest +import jinja2 from django.conf import settings from django.core.cache.backends.base import DEFAULT_TIMEOUT from django.test import TestCase, TransactionTestCase -from django.utils import translation, encoding +from django.utils import encoding, translation -from caching import base, invalidation, config +from caching import base, config, invalidation from .testapp.models import Addon, User - if sys.version_info >= (3, ): from unittest import mock else: diff --git a/tests/testapp/models.py b/tests/testapp/models.py index 60e2b27..ad29bf7 100644 --- a/tests/testapp/models.py +++ b/tests/testapp/models.py @@ -1,8 +1,9 @@ +from unittest import mock + import django from django.db import models -from caching.base import CachingMixin, CachingManager, cached_method -from unittest import mock +from caching.base import CachingManager, CachingMixin, cached_method # This global call counter will be shared among all instances of an Addon. call_counter = mock.Mock() From 4388e904a7365f234010550d821f1302671c097b Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Wed, 6 Jul 2022 15:51:46 +0000 Subject: [PATCH 210/214] add prettier --- .github/workflows/ci.yaml | 4 +- .pre-commit-config.yaml | 4 ++ .../testapp/fixtures/testapp/test_cache.json | 62 +++++++++---------- 3 files changed, 37 insertions(+), 33 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 340d092..8be490c 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -9,7 +9,7 @@ on: - main schedule: # run once a week on early monday mornings - - cron: '22 2 * * 1' + - cron: "22 2 * * 1" jobs: pre-commit: @@ -25,7 +25,7 @@ jobs: matrix: # tox-gh-actions will only run the tox environments which match the currently # running python-version. 
See [gh-actions] in tox.ini for the mapping - python-version: [3.6, 3.7, 3.8, 3.9, '3.10'] + python-version: [3.6, 3.7, 3.8, 3.9, "3.10"] # Service containers to run with `test-job` services: memcached: diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 124fa1b..fa578be 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -20,3 +20,7 @@ repos: hooks: - id: isort args: ["--profile", "black", "--filter-files"] + - repo: https://github.com/pre-commit/mirrors-prettier + rev: "v2.6.0" + hooks: + - id: prettier diff --git a/tests/testapp/fixtures/testapp/test_cache.json b/tests/testapp/fixtures/testapp/test_cache.json index 40221b8..e9ed2df 100644 --- a/tests/testapp/fixtures/testapp/test_cache.json +++ b/tests/testapp/fixtures/testapp/test_cache.json @@ -1,34 +1,34 @@ [ - { - "pk": 1, - "model": "testapp.user", - "fields": { - "name": "fliggy" - } - }, - { - "pk": 2, - "model": "testapp.user", - "fields": { - "name": "clouseroo" - } - }, - { - "pk": 1, - "model": "testapp.addon", - "fields": { - "author2": 1, - "author1": 2, - "val": 42 - } - }, - { - "pk": 2, - "model": "testapp.addon", - "fields": { - "author2": 1, - "author1": 2, - "val": 42 - } + { + "pk": 1, + "model": "testapp.user", + "fields": { + "name": "fliggy" } + }, + { + "pk": 2, + "model": "testapp.user", + "fields": { + "name": "clouseroo" + } + }, + { + "pk": 1, + "model": "testapp.addon", + "fields": { + "author2": 1, + "author1": 2, + "val": 42 + } + }, + { + "pk": 2, + "model": "testapp.addon", + "fields": { + "author2": 1, + "author1": 2, + "val": 42 + } + } ] From c3f4c9bbbb1c1627b83238ae34e5973c136b0550 Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Wed, 6 Jul 2022 18:11:08 +0000 Subject: [PATCH 211/214] reformat the code with Black --- .pre-commit-config.yaml | 5 + caching/__init__.py | 4 +- caching/base.py | 101 ++++--- caching/config.py | 26 +- caching/ext.py | 20 +- caching/invalidation.py | 82 +++--- docs/conf.py | 14 +- 
examples/cache_machine/custom_backend.py | 10 +- .../cache_machine/django_redis_settings.py | 6 +- examples/cache_machine/locmem_settings.py | 4 +- examples/cache_machine/redis_settings.py | 2 +- examples/cache_machine/settings.py | 38 ++- run_tests.py | 53 ++-- setup.cfg | 6 +- setup.py | 50 ++-- tests/test_cache.py | 270 ++++++++++-------- tests/testapp/models.py | 12 +- 17 files changed, 382 insertions(+), 321 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index fa578be..c84aeb0 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -20,6 +20,11 @@ repos: hooks: - id: isort args: ["--profile", "black", "--filter-files"] + - repo: https://github.com/psf/black + rev: 22.6.0 + hooks: + - id: black + language_version: python3 - repo: https://github.com/pre-commit/mirrors-prettier rev: "v2.6.0" hooks: diff --git a/caching/__init__.py b/caching/__init__.py index 3ff4c19..c7c45f1 100644 --- a/caching/__init__.py +++ b/caching/__init__.py @@ -1,4 +1,4 @@ from __future__ import unicode_literals -VERSION = ('1', '1', '0') -__version__ = '.'.join(VERSION) +VERSION = ("1", "1", "0") +__version__ = ".".join(VERSION) diff --git a/caching/base.py b/caching/base.py index 29cf51b..4f1c82d 100644 --- a/caching/base.py +++ b/caching/base.py @@ -12,7 +12,7 @@ from caching import config from caching.invalidation import byid, cache, flush_key, invalidator, make_key -log = logging.getLogger('caching') +log = logging.getLogger("caching") class CachingManager(models.Manager): @@ -30,8 +30,9 @@ def contribute_to_class(self, cls, name): return super(CachingManager, self).contribute_to_class(cls, name) def post_save(self, instance, **kwargs): - self.invalidate(instance, is_new_instance=kwargs['created'], - model_cls=kwargs['sender']) + self.invalidate( + instance, is_new_instance=kwargs["created"], model_cls=kwargs["sender"] + ) def post_delete(self, instance, **kwargs): self.invalidate(instance) @@ -41,8 +42,9 @@ def invalidate(self, *objects, 
**kwargs): invalidator.invalidate_objects(objects, **kwargs) def raw(self, raw_query, params=None, *args, **kwargs): - return CachingRawQuerySet(raw_query, self.model, params=params, - using=self._db, *args, **kwargs) + return CachingRawQuerySet( + raw_query, self.model, params=params, using=self._db, *args, **kwargs + ) def cache(self, timeout=DEFAULT_TIMEOUT): return self.get_queryset().cache(timeout) @@ -61,9 +63,9 @@ class CachingModelIterable(ModelIterable): """ def __init__(self, queryset, *args, **kwargs): - self.iter_function = kwargs.pop('iter_function', None) - self.timeout = kwargs.pop('timeout', queryset.timeout) - self.db = kwargs.pop('db', queryset.db) + self.iter_function = kwargs.pop("iter_function", None) + self.timeout = kwargs.pop("timeout", queryset.timeout) + self.db = kwargs.pop("db", queryset.db) super(CachingModelIterable, self).__init__(queryset, *args, **kwargs) def query_key(self): @@ -75,14 +77,14 @@ def query_key(self): primary), throwing a Django ValueError in the process. Django prevents cross DB model saving among related objects. 
""" - query_db_string = 'qs:%s::db:%s' % (self.queryset.query_key(), self.db) + query_db_string = "qs:%s::db:%s" % (self.queryset.query_key(), self.db) return make_key(query_db_string, with_locale=False) def cache_objects(self, objects, query_key): """Cache query_key => objects, then update the flush lists.""" - log.debug('query_key: %s' % query_key) + log.debug("query_key: %s" % query_key) query_flush = flush_key(self.queryset.query_key()) - log.debug('query_flush: %s' % query_flush) + log.debug("query_flush: %s" % query_flush) cache.add(query_key, objects, timeout=self.timeout) invalidator.cache_objects(self.queryset.model, objects, query_key, query_flush) @@ -109,14 +111,14 @@ def __iter__(self): cached = cache.get(query_key) if cached is not None: - log.debug('cache hit: %s' % query_key) + log.debug("cache hit: %s" % query_key) for obj in cached: obj.from_cache = True yield obj return # Use the special FETCH_BY_ID iterator if configured. - if config.FETCH_BY_ID and hasattr(self.queryset, 'fetch_by_id'): + if config.FETCH_BY_ID and hasattr(self.queryset, "fetch_by_id"): iterator = self.queryset.fetch_by_id # No cached results. Do the database query, and cache it once we have @@ -132,7 +134,7 @@ def __iter__(self): class CachingQuerySet(models.query.QuerySet): - _default_timeout_pickle_key = '__DEFAULT_TIMEOUT__' + _default_timeout_pickle_key = "__DEFAULT_TIMEOUT__" def __init__(self, *args, **kw): super(CachingQuerySet, self).__init__(*args, **kw) @@ -148,11 +150,11 @@ def __getstate__(self): state = dict() state.update(self.__dict__) if self.timeout == DEFAULT_TIMEOUT: - state['timeout'] = self._default_timeout_pickle_key + state["timeout"] = self._default_timeout_pickle_key return state def __setstate__(self, state): - """ Safely unpickle our timeout if it's a DEFAULT_TIMEOUT. 
""" + """Safely unpickle our timeout if it's a DEFAULT_TIMEOUT.""" self.__dict__.update(state) if self.timeout == self._default_timeout_pickle_key: self.timeout = DEFAULT_TIMEOUT @@ -179,11 +181,12 @@ def fetch_by_id(self): """ # Include columns from extra since they could be used in the query's # order_by. - vals = self.values_list('pk', *list(self.query.extra.keys())) + vals = self.values_list("pk", *list(self.query.extra.keys())) pks = [val[0] for val in vals] keys = dict((byid(self.model._cache_key(pk, self.db)), pk) for pk in pks) - cached = dict((k, v) for k, v in list(cache.get_many(keys).items()) - if v is not None) + cached = dict( + (k, v) for k, v in list(cache.get_many(keys).items()) if v is not None + ) # Pick up the objects we missed. missed = [pk for key, pk in list(keys.items()) if key not in cached] @@ -206,7 +209,7 @@ def fetch_missed(self, pks): others.query.clear_limits() # Clear out the default ordering since we order based on the query. others = others.order_by().filter(pk__in=pks) - if hasattr(others, 'no_cache'): + if hasattr(others, "no_cache"): others = others.no_cache() if self.query.select_related: others.query.select_related = self.query.select_related @@ -215,7 +218,7 @@ def fetch_missed(self, pks): def count(self): super_count = super(CachingQuerySet, self).count try: - query_string = 'count:%s' % self.query_key() + query_string = "count:%s" % self.query_key() except EmptyResultSet: return 0 if self.timeout == config.NO_CACHE or config.TIMEOUT == config.NO_CACHE: @@ -249,6 +252,7 @@ def get_cache_key(self, incl_db=True): # This ensures all cached copies of an object will be invalidated # regardless of the DB on which they're modified/deleted. 
return self._cache_key(self.pk, incl_db and self._state.db or None) + cache_key = property(get_cache_key) @classmethod @@ -258,7 +262,7 @@ def model_flush_key(cls): """ # use dummy PK and DB reference that will never resolve to an actual # cache key for an object - return flush_key(cls._cache_key('all-pks', 'all-dbs')) + return flush_key(cls._cache_key("all-pks", "all-dbs")) @classmethod def _cache_key(cls, pk, db=None): @@ -268,21 +272,26 @@ def _cache_key(cls, pk, db=None): For the Addon class, with a pk of 2, we get "o:addons.addon:2". """ if db: - key_parts = ('o', cls._meta, pk, db) + key_parts = ("o", cls._meta, pk, db) else: - key_parts = ('o', cls._meta, pk) - return ':'.join(map(encoding.smart_str, key_parts)) + key_parts = ("o", cls._meta, pk) + return ":".join(map(encoding.smart_str, key_parts)) def _cache_keys(self, incl_db=True): """Return the cache key for self plus all related foreign keys.""" - fks = dict((f, getattr(self, f.attname)) for f in self._meta.fields - if isinstance(f, models.ForeignKey)) + fks = dict( + (f, getattr(self, f.attname)) + for f in self._meta.fields + if isinstance(f, models.ForeignKey) + ) keys = [] for fk, val in list(fks.items()): related_model = self._get_fk_related_model(fk) - if val is not None and hasattr(related_model, '_cache_key'): - keys.append(related_model._cache_key(val, incl_db and self._state.db or None)) + if val is not None and hasattr(related_model, "_cache_key"): + keys.append( + related_model._cache_key(val, incl_db and self._state.db or None) + ) return (self.get_cache_key(incl_db=incl_db),) + tuple(keys) @@ -298,9 +307,8 @@ def _get_fk_related_model(self, fk): class CachingRawQuerySet(models.query.RawQuerySet): - def __init__(self, *args, **kw): - timeout = kw.pop('timeout', DEFAULT_TIMEOUT) + timeout = kw.pop("timeout", DEFAULT_TIMEOUT) super(CachingRawQuerySet, self).__init__(*args, **kw) self.timeout = timeout @@ -314,7 +322,9 @@ def __iter__(self): except StopIteration: return else: - for obj in 
CachingModelIterable(self, iter_function=iterator, timeout=self.timeout): + for obj in CachingModelIterable( + self, iter_function=iterator, timeout=self.timeout + ): yield obj def query_key(self): @@ -322,7 +332,7 @@ def query_key(self): def _function_cache_key(key): - return make_key('f:%s' % key, with_locale=True) + return make_key("f:%s" % key, with_locale=True) def cached(function, key_, duration=DEFAULT_TIMEOUT): @@ -330,11 +340,11 @@ def cached(function, key_, duration=DEFAULT_TIMEOUT): key = _function_cache_key(key_) val = cache.get(key) if val is None: - log.debug('cache miss for %s' % key) + log.debug("cache miss for %s" % key) val = function() cache.set(key, val, duration) else: - log.debug('cache hit for %s' % key) + log.debug("cache hit for %s" % key) return val @@ -342,16 +352,14 @@ def cached_with(obj, f, f_key, timeout=DEFAULT_TIMEOUT): """Helper for caching a function call within an object's flush list.""" try: - obj_key = (obj.query_key() if hasattr(obj, 'query_key') - else obj.cache_key) + obj_key = obj.query_key() if hasattr(obj, "query_key") else obj.cache_key except (AttributeError, EmptyResultSet): - log.warning('%r cannot be cached.' % encoding.smart_str(obj)) + log.warning("%r cannot be cached." % encoding.smart_str(obj)) return f() - key = '%s:%s' % tuple(map(encoding.smart_str, (f_key, obj_key))) + key = "%s:%s" % tuple(map(encoding.smart_str, (f_key, obj_key))) # Put the key generated in cached() into this object's flush list. - invalidator.add_to_flush_list( - {obj.flush_key(): [_function_cache_key(key)]}) + invalidator.add_to_flush_list({obj.flush_key(): [_function_cache_key(key)]}) return cached(f, key, timeout) @@ -364,6 +372,7 @@ class cached_method(object): Lifted from werkzeug. """ + def __init__(self, func): self.func = func functools.update_wrapper(self, func) @@ -387,6 +396,7 @@ class MethodWrapper(object): The first call for a set of (args, kwargs) will use an external cache. 
After that, an object-local dict cache will be used. """ + def __init__(self, obj, func): self.obj = obj self.func = func @@ -394,12 +404,13 @@ def __init__(self, obj, func): self.cache = {} def __call__(self, *args, **kwargs): - def k(o): return o.cache_key if hasattr(o, 'cache_key') else o + def k(o): + return o.cache_key if hasattr(o, "cache_key") else o + arg_keys = list(map(k, args)) kwarg_keys = [(key, k(val)) for key, val in list(kwargs.items())] - key_parts = ('m', self.obj.cache_key, self.func.__name__, - arg_keys, kwarg_keys) - key = ':'.join(map(encoding.smart_str, key_parts)) + key_parts = ("m", self.obj.cache_key, self.func.__name__, arg_keys, kwarg_keys) + key = ":".join(map(encoding.smart_str, key_parts)) if key not in self.cache: f = functools.partial(self.func, self.obj, *args, **kwargs) self.cache[key] = cached_with(self.obj, f, key) diff --git a/caching/config.py b/caching/config.py index 99e3925..f8b015c 100644 --- a/caching/config.py +++ b/caching/config.py @@ -1,18 +1,22 @@ from django.conf import settings NO_CACHE = -1 -WHOLE_MODEL = 'whole-model' +WHOLE_MODEL = "whole-model" -CACHE_PREFIX = getattr(settings, 'CACHE_PREFIX', '') -FETCH_BY_ID = getattr(settings, 'FETCH_BY_ID', False) -FLUSH = CACHE_PREFIX + ':flush:' -CACHE_EMPTY_QUERYSETS = getattr(settings, 'CACHE_EMPTY_QUERYSETS', False) -TIMEOUT = getattr(settings, 'CACHE_COUNT_TIMEOUT', NO_CACHE) -CACHE_INVALIDATE_ON_CREATE = getattr(settings, 'CACHE_INVALIDATE_ON_CREATE', None) -CACHE_MACHINE_NO_INVALIDATION = getattr(settings, 'CACHE_MACHINE_NO_INVALIDATION', False) -CACHE_MACHINE_USE_REDIS = getattr(settings, 'CACHE_MACHINE_USE_REDIS', False) +CACHE_PREFIX = getattr(settings, "CACHE_PREFIX", "") +FETCH_BY_ID = getattr(settings, "FETCH_BY_ID", False) +FLUSH = CACHE_PREFIX + ":flush:" +CACHE_EMPTY_QUERYSETS = getattr(settings, "CACHE_EMPTY_QUERYSETS", False) +TIMEOUT = getattr(settings, "CACHE_COUNT_TIMEOUT", NO_CACHE) +CACHE_INVALIDATE_ON_CREATE = getattr(settings, 
"CACHE_INVALIDATE_ON_CREATE", None) +CACHE_MACHINE_NO_INVALIDATION = getattr( + settings, "CACHE_MACHINE_NO_INVALIDATION", False +) +CACHE_MACHINE_USE_REDIS = getattr(settings, "CACHE_MACHINE_USE_REDIS", False) _invalidate_on_create_values = (None, WHOLE_MODEL) if CACHE_INVALIDATE_ON_CREATE not in _invalidate_on_create_values: - raise ValueError('CACHE_INVALIDATE_ON_CREATE must be one of: ' - '%s' % _invalidate_on_create_values) + raise ValueError( + "CACHE_INVALIDATE_ON_CREATE must be one of: " + "%s" % _invalidate_on_create_values + ) diff --git a/caching/ext.py b/caching/ext.py index 798c9d8..ac6bd8f 100644 --- a/caching/ext.py +++ b/caching/ext.py @@ -18,7 +18,8 @@ class FragmentCacheExtension(Extension): Derived from the jinja2 documentation example. """ - tags = set(['cache']) + + tags = set(["cache"]) def __init__(self, environment): super(FragmentCacheExtension, self).__init__(environment) @@ -35,30 +36,31 @@ def parse(self, parser): lineno = next(parser.stream).lineno # Use the filename + line number and first object for the cache key. - name = '%s+%s' % (self.name, lineno) + name = "%s+%s" % (self.name, lineno) args = [nodes.Const(name), parser.parse_expression()] # If there is a comma, the user provided a timeout. If not, use # None as second parameter. timeout = nodes.Const(None) extra = nodes.Const([]) - while parser.stream.skip_if('comma'): + while parser.stream.skip_if("comma"): x = parser.parse_expression() - if parser.stream.current.type == 'assign': + if parser.stream.current.type == "assign": next(parser.stream) extra = parser.parse_expression() else: timeout = x args.extend([timeout, extra]) - body = parser.parse_statements(['name:endcache'], drop_needle=True) + body = parser.parse_statements(["name:endcache"], drop_needle=True) self.process_cache_arguments(args) # now return a `CallBlock` node that calls our _cache_support # helper method on this extension. 
- return nodes.CallBlock(self.call_method('_cache_support', args), - [], [], body).set_lineno(lineno) + return nodes.CallBlock( + self.call_method("_cache_support", args), [], [], body + ).set_lineno(lineno) def process_cache_arguments(self, args): """Extension point for adding anything extra to the cache_support.""" @@ -68,8 +70,8 @@ def _cache_support(self, name, obj, timeout, extra, caller): """Cache helper callback.""" if settings.DEBUG: return caller() - extra = ':'.join(map(encoding.smart_str, extra)) - key = 'fragment:%s:%s' % (name, extra) + extra = ":".join(map(encoding.smart_str, extra)) + key = "fragment:%s:%s" % (name, extra) return caching.base.cached_with(obj, caller, key, timeout) diff --git a/caching/invalidation.py b/caching/invalidation.py index 2fb3164..4481c16 100644 --- a/caching/invalidation.py +++ b/caching/invalidation.py @@ -20,16 +20,16 @@ # Look for an own cache first before falling back to the default cache try: - cache = caches['cache_machine'] + cache = caches["cache_machine"] except (InvalidCacheBackendError, ValueError): cache = default_cache -log = logging.getLogger('caching.invalidation') +log = logging.getLogger("caching.invalidation") def make_key(k, with_locale=True): """Generate the full key for ``k``, with a prefix.""" - key = encoding.smart_bytes('%s:%s' % (config.CACHE_PREFIX, k)) + key = encoding.smart_bytes("%s:%s" % (config.CACHE_PREFIX, k)) if with_locale: key += encoding.smart_bytes(translation.get_language()) # memcached keys must be < 250 bytes and w/o whitespace, but it's nice @@ -45,7 +45,7 @@ def flush_key(obj): def byid(obj): key = obj if isinstance(obj, str) else obj.cache_key - return make_key('byid:' + key) + return make_key("byid:" + key) def safe_redis(return_type): @@ -54,24 +54,26 @@ def safe_redis(return_type): return_type (optionally a callable) will be returned if there is an error. 
""" + def decorator(f): @functools.wraps(f) def wrapper(*args, **kw): try: return f(*args, **kw) except (socket.error, redislib.RedisError) as e: - log.error('redis error: %s' % e) + log.error("redis error: %s" % e) # log.error('%r\n%r : %r' % (f.__name__, args[1:], kw)) - if hasattr(return_type, '__call__'): + if hasattr(return_type, "__call__"): return return_type() else: return return_type + return wrapper + return decorator class Invalidator(object): - def invalidate_objects(self, objects, is_new_instance=False, model_cls=None): """Invalidate all the flush lists for the given ``objects``.""" obj_keys = [k for o in objects for k in o._cache_keys()] @@ -79,17 +81,21 @@ def invalidate_objects(self, objects, is_new_instance=False, model_cls=None): # If whole-model invalidation on create is enabled, include this model's # key in the list to be invalidated. Note that the key itself won't # contain anything in the cache, but its corresponding flush key will. - if (config.CACHE_INVALIDATE_ON_CREATE == config.WHOLE_MODEL and - is_new_instance and model_cls and hasattr(model_cls, 'model_flush_key')): + if ( + config.CACHE_INVALIDATE_ON_CREATE == config.WHOLE_MODEL + and is_new_instance + and model_cls + and hasattr(model_cls, "model_flush_key") + ): flush_keys.append(model_cls.model_flush_key()) if not obj_keys or not flush_keys: return obj_keys, flush_keys = self.expand_flush_lists(obj_keys, flush_keys) if obj_keys: - log.debug('deleting object keys: %s' % obj_keys) + log.debug("deleting object keys: %s" % obj_keys) cache.delete_many(obj_keys) if flush_keys: - log.debug('clearing flush lists: %s' % flush_keys) + log.debug("clearing flush lists: %s" % flush_keys) self.clear_flush_lists(flush_keys) def cache_objects(self, model, objects, query_key, query_flush): @@ -100,7 +106,7 @@ def cache_objects(self, model, objects, query_key, query_flush): flush_lists = collections.defaultdict(set) for key in flush_keys: - log.debug('adding %s to %s' % (query_flush, key)) + 
log.debug("adding %s to %s" % (query_flush, key)) flush_lists[key].add(query_flush) flush_lists[query_flush].add(query_key) # Add this query to the flush key for the entire model, if enabled @@ -112,7 +118,7 @@ def cache_objects(self, model, objects, query_key, query_flush): obj_flush = obj.flush_key() for key in obj._flush_keys(): if key not in (obj_flush, model_flush): - log.debug('related: adding %s to %s' % (obj_flush, key)) + log.debug("related: adding %s to %s" % (obj_flush, key)) flush_lists[key].add(obj_flush) if config.FETCH_BY_ID: flush_lists[key].add(byid(obj)) @@ -125,7 +131,7 @@ def expand_flush_lists(self, obj_keys, flush_keys): The search starts with the lists in `keys` and expands to any flush lists found therein. Returns ({objects to flush}, {flush keys found}). """ - log.debug('in expand_flush_lists') + log.debug("in expand_flush_lists") obj_keys = set(obj_keys) search_keys = flush_keys = set(flush_keys) @@ -139,7 +145,7 @@ def expand_flush_lists(self, obj_keys, flush_keys): else: obj_keys.add(key) if new_keys: - log.debug('search for %s found keys %s' % (search_keys, new_keys)) + log.debug("search for %s found keys %s" % (search_keys, new_keys)) flush_keys.update(new_keys) search_keys = new_keys else: @@ -158,9 +164,11 @@ def add_to_flush_list(self, mapping): def get_flush_lists(self, keys): """Return a set of object keys from the lists in `keys`.""" - return set(e for flush_list in - [_f for _f in list(cache.get_many(keys).values()) if _f] - for e in flush_list) + return set( + e + for flush_list in [_f for _f in list(cache.get_many(keys).values()) if _f] + for e in flush_list + ) def clear_flush_lists(self, keys): """Remove the given keys from the database.""" @@ -168,11 +176,10 @@ def clear_flush_lists(self, keys): class RedisInvalidator(Invalidator): - def safe_key(self, key): - if ' ' in key or '\n' in key: + if " " in key or "\n" in key: log.warning('BAD KEY: "%s"' % key) - return '' + return "" return key @safe_redis(None) @@ -183,13 
+190,13 @@ def add_to_flush_list(self, mapping): for query_key in list_: # Redis happily accepts unicode, but returns byte strings, # so manually encode and decode the keys on the flush list here - pipe.sadd(self.safe_key(key), query_key.encode('utf-8')) + pipe.sadd(self.safe_key(key), query_key.encode("utf-8")) pipe.execute() @safe_redis(set) def get_flush_lists(self, keys): flush_list = redis.sunion(list(map(self.safe_key, keys))) - return [k.decode('utf-8') for k in flush_list] + return [k.decode("utf-8") for k in flush_list] @safe_redis(None) def clear_flush_lists(self, keys): @@ -197,7 +204,6 @@ def clear_flush_lists(self, keys): class NullInvalidator(Invalidator): - def add_to_flush_list(self, mapping): return @@ -207,23 +213,22 @@ def parse_backend_uri(backend_uri): Converts the "backend_uri" into a host and any extra params that are required for the backend. Returns a (host, params) tuple. """ - backend_uri_sliced = backend_uri.split('://') + backend_uri_sliced = backend_uri.split("://") if len(backend_uri_sliced) > 2: - raise InvalidCacheBackendError( - "Backend URI can't have more than one scheme://") + raise InvalidCacheBackendError("Backend URI can't have more than one scheme://") elif len(backend_uri_sliced) == 2: rest = backend_uri_sliced[1] else: rest = backend_uri_sliced[0] host = rest - qpos = rest.find('?') + qpos = rest.find("?") if qpos != -1: - params = dict(parse_qsl(rest[qpos + 1:])) + params = dict(parse_qsl(rest[qpos + 1 :])) host = rest[:qpos] else: params = {} - if host.endswith('/'): + if host.endswith("/"): host = host[:-1] return host, params @@ -233,27 +238,28 @@ def get_redis_backend(): """Connect to redis from a string like CACHE_BACKEND.""" # From django-redis-cache. 
server, params = parse_backend_uri(settings.REDIS_BACKEND) - db = params.pop('db', 0) + db = params.pop("db", 0) try: db = int(db) except (ValueError, TypeError): db = 0 try: - socket_timeout = float(params.pop('socket_timeout')) + socket_timeout = float(params.pop("socket_timeout")) except (KeyError, ValueError): socket_timeout = None - password = params.pop('password', None) - if ':' in server: - host, port = server.split(':') + password = params.pop("password", None) + if ":" in server: + host, port = server.split(":") try: port = int(port) except (ValueError, TypeError): port = 6379 else: - host = 'localhost' + host = "localhost" port = 6379 - return redislib.Redis(host=host, port=port, db=db, password=password, - socket_timeout=socket_timeout) + return redislib.Redis( + host=host, port=port, db=db, password=password, socket_timeout=socket_timeout + ) if config.CACHE_MACHINE_NO_INVALIDATION: diff --git a/docs/conf.py b/docs/conf.py index 3341ac5..f58ba74 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -3,20 +3,20 @@ import caching -sys.path.append(os.path.abspath('..')) +sys.path.append(os.path.abspath("..")) # The suffix of source filenames. -source_suffix = '.rst' +source_suffix = ".rst" # The master toctree document. -master_doc = 'index' +master_doc = "index" -extensions = ['sphinx.ext.autodoc'] +extensions = ["sphinx.ext.autodoc"] # General information about the project. -project = u'Cache Machine' -copyright = u'2010, The Zamboni Collective' +project = "Cache Machine" +copyright = "2010, The Zamboni Collective" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -28,4 +28,4 @@ # List of directories, relative to source directory, that shouldn't be searched # for source files. 
-exclude_trees = ['_build'] +exclude_trees = ["_build"] diff --git a/examples/cache_machine/custom_backend.py b/examples/cache_machine/custom_backend.py index cee9415..6941bfa 100644 --- a/examples/cache_machine/custom_backend.py +++ b/examples/cache_machine/custom_backend.py @@ -2,11 +2,11 @@ from .settings import * CACHES = { - 'default': { - 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', + "default": { + "BACKEND": "django.core.cache.backends.locmem.LocMemCache", }, - 'cache_machine': { - 'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache', - 'LOCATION': 'localhost:11211', + "cache_machine": { + "BACKEND": "django.core.cache.backends.memcached.MemcachedCache", + "LOCATION": "localhost:11211", }, } diff --git a/examples/cache_machine/django_redis_settings.py b/examples/cache_machine/django_redis_settings.py index d2130f2..941e7ea 100644 --- a/examples/cache_machine/django_redis_settings.py +++ b/examples/cache_machine/django_redis_settings.py @@ -2,8 +2,8 @@ from .redis_settings import * CACHES = { - 'default': { - 'BACKEND': 'django_redis.cache.RedisCache', - 'LOCATION': 'redis://127.0.0.1:6379/0', + "default": { + "BACKEND": "django_redis.cache.RedisCache", + "LOCATION": "redis://127.0.0.1:6379/0", }, } diff --git a/examples/cache_machine/locmem_settings.py b/examples/cache_machine/locmem_settings.py index 51af54e..054826c 100644 --- a/examples/cache_machine/locmem_settings.py +++ b/examples/cache_machine/locmem_settings.py @@ -2,7 +2,7 @@ from .settings import * CACHES = { - 'default': { - 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', + "default": { + "BACKEND": "django.core.cache.backends.locmem.LocMemCache", }, } diff --git a/examples/cache_machine/redis_settings.py b/examples/cache_machine/redis_settings.py index ae9fc07..3263e66 100644 --- a/examples/cache_machine/redis_settings.py +++ b/examples/cache_machine/redis_settings.py @@ -2,4 +2,4 @@ from .settings import * CACHE_MACHINE_USE_REDIS = True -REDIS_BACKEND = 
'redis://' +REDIS_BACKEND = "redis://" diff --git a/examples/cache_machine/settings.py b/examples/cache_machine/settings.py index 01989dd..72d8036 100644 --- a/examples/cache_machine/settings.py +++ b/examples/cache_machine/settings.py @@ -4,38 +4,36 @@ import django CACHES = { - 'default': { - 'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache', - 'LOCATION': 'localhost:11211', + "default": { + "BACKEND": "django.core.cache.backends.memcached.MemcachedCache", + "LOCATION": "localhost:11211", }, } DATABASES = { - 'default': dj_database_url.config(default='postgres:///cache_machine_devel'), - 'primary2': dj_database_url.parse( - os.getenv('DATABASE_URL_2', 'postgres:///cache_machine_devel2') + "default": dj_database_url.config(default="postgres:///cache_machine_devel"), + "primary2": dj_database_url.parse( + os.getenv("DATABASE_URL_2", "postgres:///cache_machine_devel2") ), } -for primary, replica in (('default', 'replica'), ('primary2', 'replica2')): +for primary, replica in (("default", "replica"), ("primary2", "replica2")): DATABASES[replica] = DATABASES[primary].copy() - DATABASES[replica]['TEST'] = {'MIRROR': primary} + DATABASES[replica]["TEST"] = {"MIRROR": primary} -DEFAULT_AUTO_FIELD = 'django.db.models.AutoField' +DEFAULT_AUTO_FIELD = "django.db.models.AutoField" -INSTALLED_APPS = ( - 'tests.testapp', -) +INSTALLED_APPS = ("tests.testapp",) -SECRET_KEY = 'ok' +SECRET_KEY = "ok" MIDDLEWARE_CLASSES = ( - 'django.contrib.sessions.middleware.SessionMiddleware', - 'django.middleware.common.CommonMiddleware', - 'django.middleware.csrf.CsrfViewMiddleware', - 'django.contrib.auth.middleware.AuthenticationMiddleware', - 'django.contrib.auth.middleware.SessionAuthenticationMiddleware', - 'django.contrib.messages.middleware.MessageMiddleware', - 'django.middleware.clickjacking.XFrameOptionsMiddleware', + "django.contrib.sessions.middleware.SessionMiddleware", + "django.middleware.common.CommonMiddleware", + "django.middleware.csrf.CsrfViewMiddleware", + 
"django.contrib.auth.middleware.AuthenticationMiddleware", + "django.contrib.auth.middleware.SessionAuthenticationMiddleware", + "django.contrib.messages.middleware.MessageMiddleware", + "django.middleware.clickjacking.XFrameOptionsMiddleware", ) if django.VERSION[0] >= 2: diff --git a/run_tests.py b/run_tests.py index 5d9d1cf..1ec7ade 100644 --- a/run_tests.py +++ b/run_tests.py @@ -12,46 +12,53 @@ NAME = os.path.basename(os.path.dirname(__file__)) ROOT = os.path.abspath(os.path.dirname(__file__)) -os.environ['PYTHONPATH'] = os.pathsep.join([ROOT, - os.path.join(ROOT, 'examples')]) +os.environ["PYTHONPATH"] = os.pathsep.join([ROOT, os.path.join(ROOT, "examples")]) SETTINGS = ( - 'locmem_settings', - 'settings', - 'memcache_byid', - 'custom_backend', - 'redis_settings', - 'redis_byid', - 'django_redis_settings', + "locmem_settings", + "settings", + "memcache_byid", + "custom_backend", + "redis_settings", + "redis_byid", + "django_redis_settings", ) def main(): - parser = argparse.ArgumentParser(description='Run the tests for django-cache-machine. ' - 'If no options are specified, tests will be run with ' - 'all settings files and without coverage.py.') - parser.add_argument('--with-coverage', action='store_true', - help='Run tests with coverage.py and display coverage report') - parser.add_argument('--settings', choices=SETTINGS, - help='Run tests only for the specified settings file') + parser = argparse.ArgumentParser( + description="Run the tests for django-cache-machine. " + "If no options are specified, tests will be run with " + "all settings files and without coverage.py." 
+ ) + parser.add_argument( + "--with-coverage", + action="store_true", + help="Run tests with coverage.py and display coverage report", + ) + parser.add_argument( + "--settings", + choices=SETTINGS, + help="Run tests only for the specified settings file", + ) args = parser.parse_args() settings = args.settings and [args.settings] or SETTINGS results = [] - django_admin = check_output(['which', 'django-admin']).strip() + django_admin = check_output(["which", "django-admin"]).strip() for i, settings_module in enumerate(settings): - print('Running tests for: %s' % settings_module) - os.environ['DJANGO_SETTINGS_MODULE'] = 'cache_machine.%s' % settings_module + print("Running tests for: %s" % settings_module) + os.environ["DJANGO_SETTINGS_MODULE"] = "cache_machine.%s" % settings_module # append to the existing coverage data for all but the first run if args.with_coverage and i > 0: - test_cmd = ['coverage', 'run', '--append'] + test_cmd = ["coverage", "run", "--append"] elif args.with_coverage: - test_cmd = ['coverage', 'run'] + test_cmd = ["coverage", "run"] else: test_cmd = [] - test_cmd += [django_admin, 'test', '--keepdb'] + test_cmd += [django_admin, "test", "--keepdb"] results.append(call(test_cmd)) if args.with_coverage: - results.append(call(['coverage', 'report', '-m', '--fail-under', '70'])) + results.append(call(["coverage", "report", "-m", "--fail-under", "70"])) sys.exit(any(results) and 1 or 0) diff --git a/setup.cfg b/setup.cfg index d089b6c..122447f 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,10 @@ [flake8] -max-line-length=100 +max-line-length = 88 +extend-ignore = E203 exclude= .tox .git .direnv + +[isort] +profile = black diff --git a/setup.py b/setup.py index b981ad2..d7a62d7 100644 --- a/setup.py +++ b/setup.py @@ -3,34 +3,34 @@ import caching setup( - name='django-cache-machine', + name="django-cache-machine", version=caching.__version__, - description='Automatic caching and invalidation for Django models ' - 'through the ORM.', - 
long_description=open('README.rst').read(), - author='Jeff Balogh', - author_email='jbalogh@mozilla.com', - url='http://github.com/django-cache-machine/django-cache-machine', - license='BSD', - packages=['caching'], + description="Automatic caching and invalidation for Django models " + "through the ORM.", + long_description=open("README.rst").read(), + author="Jeff Balogh", + author_email="jbalogh@mozilla.com", + url="http://github.com/django-cache-machine/django-cache-machine", + license="BSD", + packages=["caching"], include_package_data=True, zip_safe=False, classifiers=[ - 'Development Status :: 4 - Beta', - 'Environment :: Web Environment', + "Development Status :: 4 - Beta", + "Environment :: Web Environment", # I don't know what exactly this means, but why not? - 'Environment :: Web Environment :: Mozilla', - 'Framework :: Django', - 'Intended Audience :: Developers', - 'License :: OSI Approved :: BSD License', - 'Operating System :: OS Independent', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.4', - 'Programming Language :: Python :: 3.5', - 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7', - 'Topic :: Software Development :: Libraries :: Python Modules', - ] + "Environment :: Web Environment :: Mozilla", + "Framework :: Django", + "Intended Audience :: Developers", + "License :: OSI Approved :: BSD License", + "Operating System :: OS Independent", + "Programming Language :: Python :: 2", + "Programming Language :: Python :: 2.7", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.4", + "Programming Language :: Python :: 3.5", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Topic :: Software Development :: Libraries :: Python Modules", + ], ) diff --git a/tests/test_cache.py b/tests/test_cache.py index 53beb18..be212dd 100644 --- 
a/tests/test_cache.py +++ b/tests/test_cache.py @@ -15,7 +15,7 @@ from .testapp.models import Addon, User -if sys.version_info >= (3, ): +if sys.version_info >= (3,): from unittest import mock else: import mock @@ -26,13 +26,13 @@ class CachingTestCase(TestCase): - fixtures = ['tests/testapp/fixtures/testapp/test_cache.json'] - extra_apps = ['tests.testapp'] + fixtures = ["tests/testapp/fixtures/testapp/test_cache.json"] + extra_apps = ["tests.testapp"] def setUp(self): cache.clear() self.old_timeout = config.TIMEOUT - if getattr(settings, 'CACHE_MACHINE_USE_REDIS', False): + if getattr(settings, "CACHE_MACHINE_USE_REDIS", False): invalidation.redis.flushall() def tearDown(self): @@ -41,11 +41,13 @@ def tearDown(self): def test_flush_key(self): """flush_key should work for objects or strings.""" a = Addon.objects.get(id=1) - self.assertEqual(base.flush_key(a.get_cache_key(incl_db=False)), base.flush_key(a)) + self.assertEqual( + base.flush_key(a.get_cache_key(incl_db=False)), base.flush_key(a) + ) def test_cache_key(self): a = Addon.objects.get(id=1) - self.assertEqual(a.cache_key, 'o:testapp.addon:1:default') + self.assertEqual(a.cache_key, "o:testapp.addon:1:default") keys = set((a.cache_key, a.author1.cache_key, a.author2.cache_key)) self.assertEqual(set(a._cache_keys()), keys) @@ -65,13 +67,13 @@ def test_slice_cache(self): def test_should_not_cache_values(self): with self.assertNumQueries(2): - Addon.objects.values('id')[0] - Addon.objects.values('id')[0] + Addon.objects.values("id")[0] + Addon.objects.values("id")[0] def test_should_not_cache_values_list(self): with self.assertNumQueries(2): - Addon.objects.values_list('id')[0] - Addon.objects.values_list('id')[0] + Addon.objects.values_list("id")[0] + Addon.objects.values_list("id")[0] def test_invalidation(self): self.assertIs(Addon.objects.get(id=1).from_cache, False) @@ -102,7 +104,7 @@ def test_invalidation_cross_locale(self): # Do query & invalidation in a different locale. 
old_locale = translation.get_language() - translation.activate('fr') + translation.activate("fr") self.assertIs(Addon.objects.get(id=1).from_cache, True) a = [x for x in Addon.objects.all() if x.id == 1][0] self.assertIs(a.from_cache, True) @@ -117,10 +119,10 @@ def test_invalidation_cross_locale(self): def test_fk_invalidation(self): """When an object is invalidated, its foreign keys get invalidated.""" a = Addon.objects.get(id=1) - self.assertIs(User.objects.get(name='clouseroo').from_cache, False) + self.assertIs(User.objects.get(name="clouseroo").from_cache, False) a.save() - self.assertIs(User.objects.get(name='clouseroo').from_cache, False) + self.assertIs(User.objects.get(name="clouseroo").from_cache, False) def test_fk_parent_invalidation(self): """When a foreign key changes, any parent objects get invalidated.""" @@ -130,16 +132,16 @@ def test_fk_parent_invalidation(self): u = User.objects.get(id=a.author1.id) self.assertIs(u.from_cache, True) - u.name = 'fffuuu' + u.name = "fffuuu" u.save() self.assertIs(User.objects.get(id=a.author1.id).from_cache, False) a = Addon.objects.get(id=1) self.assertIs(a.from_cache, False) - self.assertEqual(a.author1.name, 'fffuuu') + self.assertEqual(a.author1.name, "fffuuu") def test_raw_cache(self): - sql = 'SELECT * FROM %s WHERE id = 1' % Addon._meta.db_table + sql = "SELECT * FROM %s WHERE id = 1" % Addon._meta.db_table raw = list(Addon.objects.raw(sql)) self.assertEqual(len(raw), 1) raw_addon = raw[0] @@ -158,45 +160,45 @@ def test_raw_cache(self): def test_raw_cache_params(self): """Make sure the query params are included in the cache key.""" - sql = 'SELECT * from %s WHERE id = %%s' % Addon._meta.db_table + sql = "SELECT * from %s WHERE id = %%s" % Addon._meta.db_table raw = list(Addon.objects.raw(sql, [1]))[0] self.assertEqual(raw.id, 1) raw2 = list(Addon.objects.raw(sql, [2]))[0] self.assertEqual(raw2.id, 2) - @mock.patch('caching.base.CachingModelIterable') + @mock.patch("caching.base.CachingModelIterable") def 
test_raw_nocache(self, CachingModelIterable): base.TIMEOUT = 60 - sql = 'SELECT * FROM %s WHERE id = 1' % Addon._meta.db_table + sql = "SELECT * FROM %s WHERE id = 1" % Addon._meta.db_table raw = list(Addon.objects.raw(sql, timeout=config.NO_CACHE)) self.assertEqual(len(raw), 1) raw_addon = raw[0] - self.assertFalse(hasattr(raw_addon, 'from_cache')) + self.assertFalse(hasattr(raw_addon, "from_cache")) self.assertFalse(CachingModelIterable.called) - @mock.patch('caching.base.cache') + @mock.patch("caching.base.cache") def test_count_cache(self, cache_mock): config.TIMEOUT = 60 - cache_mock.scheme = 'memcached' + cache_mock.scheme = "memcached" cache_mock.get.return_value = None q = Addon.objects.all() q.count() - self.assertTrue(cache_mock.set.call_args, 'set not called') + self.assertTrue(cache_mock.set.call_args, "set not called") args, kwargs = cache_mock.set.call_args key, value, timeout = args self.assertEqual(value, 2) self.assertEqual(timeout, 60) - @mock.patch('caching.base.cached') + @mock.patch("caching.base.cached") def test_count_none_timeout(self, cached_mock): config.TIMEOUT = config.NO_CACHE Addon.objects.count() self.assertEqual(cached_mock.call_count, 0) - @mock.patch('caching.base.cached') + @mock.patch("caching.base.cached") def test_count_nocache(self, cached_mock): base.TIMEOUT = 60 Addon.objects.no_cache().count() @@ -206,28 +208,29 @@ def test_queryset_flush_list(self): """Check that we're making a flush list for the queryset.""" q = Addon.objects.all() objects = list(q) # Evaluate the queryset so it gets cached. 
- base.invalidator.add_to_flush_list({q.flush_key(): ['remove-me']}) - cache.set('remove-me', 15) + base.invalidator.add_to_flush_list({q.flush_key(): ["remove-me"]}) + cache.set("remove-me", 15) Addon.objects.invalidate(objects[0]) self.assertIs(cache.get(q.flush_key()), None) - self.assertIs(cache.get('remove-me'), None) + self.assertIs(cache.get("remove-me"), None) def test_jinja_cache_tag_queryset(self): - env = jinja2.Environment(extensions=['caching.ext.cache']) + env = jinja2.Environment(extensions=["caching.ext.cache"]) def check(q, expected): t = env.from_string( "{% cache q %}{% for x in q %}{{ x.id }}:{{ x.val }};" - "{% endfor %}{% endcache %}") + "{% endfor %}{% endcache %}" + ) self.assertEqual(t.render(q=q), expected) # Get the template in cache, then hijack iterator to make sure we're # hitting the cached fragment. - check(Addon.objects.all(), '1:42;2:42;') + check(Addon.objects.all(), "1:42;2:42;") qs = Addon.objects.all() qs.iterator = mock.Mock() - check(qs, '1:42;2:42;') + check(qs, "1:42;2:42;") self.assertFalse(qs.iterator.called) # Make changes, make sure we dropped the cached fragment. 
@@ -239,62 +242,69 @@ def check(q, expected): cache.get(q.flush_key()) self.assertIs(cache.get(q.flush_key()), None) - check(Addon.objects.all(), '1:17;2:42;') + check(Addon.objects.all(), "1:17;2:42;") qs = Addon.objects.all() qs.iterator = mock.Mock() - check(qs, '1:17;2:42;') + check(qs, "1:17;2:42;") def test_jinja_cache_tag_object(self): - env = jinja2.Environment(extensions=['caching.ext.cache']) + env = jinja2.Environment(extensions=["caching.ext.cache"]) addon = Addon.objects.get(id=1) def check(obj, expected): t = env.from_string( - '{% cache obj, 30 %}{{ obj.id }}:{{ obj.val }}{% endcache %}') + "{% cache obj, 30 %}{{ obj.id }}:{{ obj.val }}{% endcache %}" + ) self.assertEqual(t.render(obj=obj), expected) - check(addon, '1:42') + check(addon, "1:42") addon.val = 17 addon.save() - check(addon, '1:17') + check(addon, "1:17") def test_jinja_multiple_tags(self): - env = jinja2.Environment(extensions=['caching.ext.cache']) + env = jinja2.Environment(extensions=["caching.ext.cache"]) addon = Addon.objects.get(id=1) - template = ("{% cache obj %}{{ obj.id }}{% endcache %}\n" - "{% cache obj %}{{ obj.val }}{% endcache %}") + template = ( + "{% cache obj %}{{ obj.id }}{% endcache %}\n" + "{% cache obj %}{{ obj.val }}{% endcache %}" + ) def check(obj, expected): t = env.from_string(template) self.assertEqual(t.render(obj=obj), expected) - check(addon, '1\n42') + check(addon, "1\n42") addon.val = 17 addon.save() - check(addon, '1\n17') + check(addon, "1\n17") def test_jinja_cache_tag_extra(self): - env = jinja2.Environment(extensions=['caching.ext.cache']) + env = jinja2.Environment(extensions=["caching.ext.cache"]) addon = Addon.objects.get(id=1) - template = ('{% cache obj, extra=[obj.key] %}{{ obj.id }}:' - '{{ obj.key }}{% endcache %}') + template = ( + "{% cache obj, extra=[obj.key] %}{{ obj.id }}:" + "{{ obj.key }}{% endcache %}" + ) def check(obj, expected): t = env.from_string(template) self.assertEqual(t.render(obj=obj), expected) addon.key = 1 - 
check(addon, '1:1') + check(addon, "1:1") addon.key = 2 - check(addon, '1:2') + check(addon, "1:2") - template = ('{% cache obj, 10, extra=[obj.key] %}{{ obj.id }}:' - '{{ obj.key }}{% endcache %}') + template = ( + "{% cache obj, 10, extra=[obj.key] %}{{ obj.id }}:" + "{{ obj.key }}{% endcache %}" + ) addon.key = 1 - check(addon, '1:1') + check(addon, "1:1") addon.key = 2 - check(addon, '1:2') + check(addon, "1:2") def test_cached_with(self): counter = mock.Mock() @@ -305,7 +315,8 @@ def expensive(): a = Addon.objects.get(id=1) - def f(): return base.cached_with(a, expensive, 'key') + def f(): + return base.cached_with(a, expensive, "key") # Only gets called once. self.assertEqual(f(), 1) @@ -313,7 +324,7 @@ def f(): return base.cached_with(a, expensive, 'key') # Switching locales does not reuse the cache. old_locale = translation.get_language() - translation.activate('fr') + translation.activate("fr") self.assertEqual(f(), 2) # Called again after flush. @@ -326,7 +337,8 @@ def f(): return base.cached_with(a, expensive, 'key') counter.reset_mock() q = Addon.objects.filter(id=1) - def f(): return base.cached_with(q, expensive, 'key') + def f(): + return base.cached_with(q, expensive, "key") # Only gets called once. 
self.assertEqual(f(), 1) @@ -345,17 +357,20 @@ def f(): counter() return counter.call_count - self.assertEqual(base.cached_with([], f, 'key'), 1) + self.assertEqual(base.cached_with([], f, "key"), 1) def test_cached_with_unicode(self): - u = encoding.smart_bytes('\\u05ea\\u05d9\\u05d0\\u05d5\\u05e8 ' - '\\u05d0\\u05d5\\u05e1\\u05e3') + u = encoding.smart_bytes( + "\\u05ea\\u05d9\\u05d0\\u05d5\\u05e8 " "\\u05d0\\u05d5\\u05e1\\u05e3" + ) obj = mock.Mock() - obj.query_key.return_value = 'xxx' - obj.flush_key.return_value = 'key' + obj.query_key.return_value = "xxx" + obj.flush_key.return_value = "key" + + def f(): + return 1 - def f(): return 1 - self.assertEqual(base.cached_with(obj, f, 'adf:%s' % u), 1) + self.assertEqual(base.cached_with(obj, f, "adf:%s" % u), 1) def test_cached_method(self): a = Addon.objects.get(id=1) @@ -378,45 +393,46 @@ def test_cached_method(self): # Make sure we're updating the wrapper's docstring. self.assertEqual(b.calls.__doc__, Addon.calls.__doc__) - @mock.patch('caching.base.cache.get') + @mock.patch("caching.base.cache.get") def test_no_cache_from_manager(self, mock_cache): a = Addon.objects.no_cache().get(id=1) self.assertEqual(a.id, 1) - self.assertFalse(hasattr(a, 'from_cache')) + self.assertFalse(hasattr(a, "from_cache")) self.assertFalse(mock_cache.called) - @mock.patch('caching.base.cache.get') + @mock.patch("caching.base.cache.get") def test_no_cache_from_queryset(self, mock_cache): a = Addon.objects.all().no_cache().get(id=1) self.assertEqual(a.id, 1) - self.assertFalse(hasattr(a, 'from_cache')) + self.assertFalse(hasattr(a, "from_cache")) self.assertFalse(mock_cache.called) def test_timeout_from_manager(self): q = Addon.objects.cache(12).filter(id=1) self.assertEqual(q.timeout, 12) a = q.get() - self.assertTrue(hasattr(a, 'from_cache')) + self.assertTrue(hasattr(a, "from_cache")) self.assertEqual(a.id, 1) def test_timeout_from_queryset(self): q = Addon.objects.all().cache(12).filter(id=1) self.assertEqual(q.timeout, 12) a = 
q.get() - self.assertTrue(hasattr(a, 'from_cache')) + self.assertTrue(hasattr(a, "from_cache")) self.assertEqual(a.id, 1) @unittest.skipUnless( - any(['memcache' in c['BACKEND'] for c in settings.CACHES.values()]), - 'This test requires that Django use memcache') - @mock.patch('memcache.Client.set') + any(["memcache" in c["BACKEND"] for c in settings.CACHES.values()]), + "This test requires that Django use memcache", + ) + @mock.patch("memcache.Client.set") def test_infinite_timeout(self, mock_set): """ Test that memcached infinite timeouts work with all Django versions. """ - cache.set('foo', 'bar', timeout=None) + cache.set("foo", "bar", timeout=None) # for memcached, 0 timeout means store forever - mock_set.assert_called_with(':1:foo', 'bar', 0) + mock_set.assert_called_with(":1:foo", "bar", 0) def test_cache_and_no_cache(self): """Whatever happens last sticks.""" @@ -429,14 +445,14 @@ def test_cache_and_no_cache(self): self.assertEqual(q.timeout, 12) self.assertNotEqual(no_cache.timeout, 12) - self.assertFalse(hasattr(no_cache.get(), 'from_cache')) + self.assertFalse(hasattr(no_cache.get(), "from_cache")) self.assertEqual(q.get().id, 1) - self.assertTrue(hasattr(q.get(), 'from_cache')) + self.assertTrue(hasattr(q.get(), "from_cache")) - @mock.patch('caching.base.cache') + @mock.patch("caching.base.cache") def test_cache_machine_timeout(self, cache): - cache.scheme = 'memcached' + cache.scheme = "memcached" cache.get.return_value = None cache.get_many.return_value = {} @@ -445,10 +461,10 @@ def test_cache_machine_timeout(self, cache): self.assertTrue(cache.add.called) args, kwargs = cache.add.call_args - self.assertEqual(kwargs, {'timeout': 12}) + self.assertEqual(kwargs, {"timeout": 12}) def test_unicode_key(self): - list(User.objects.filter(name='\\xfcmla\\xfct')) + list(User.objects.filter(name="\\xfcmla\\xfct")) def test_empty_in(self): # Raised an exception before fixing #2. 
@@ -463,7 +479,7 @@ def test_empty_queryset(self): with self.assertNumQueries(k): self.assertEqual(len(Addon.objects.filter(pk=42)), 0) - @mock.patch('caching.config.CACHE_EMPTY_QUERYSETS', True) + @mock.patch("caching.config.CACHE_EMPTY_QUERYSETS", True) def test_cache_empty_queryset(self): for k in (1, 0): with self.assertNumQueries(k): @@ -483,41 +499,42 @@ def test_invalidate_new_related_object(self): self.assertEqual([a.val for a in u.addon_set.all()], [42, 17]) def test_make_key_unicode(self): - translation.activate('en-US') - f = 'fragment\xe9\x9b\xbb\xe8\x85\xa6\xe7\x8e' + translation.activate("en-US") + f = "fragment\xe9\x9b\xbb\xe8\x85\xa6\xe7\x8e" # This would crash with a unicode error. base.make_key(f, with_locale=True) translation.deactivate() - @mock.patch('caching.invalidation.cache.get_many') + @mock.patch("caching.invalidation.cache.get_many") def test_get_flush_lists_none(self, cache_mock): - if not getattr(settings, 'CACHE_MACHINE_USE_REDIS', False): + if not getattr(settings, "CACHE_MACHINE_USE_REDIS", False): cache_mock.return_value.values.return_value = [None, [1]] self.assertEqual(base.invalidator.get_flush_lists(None), set([1])) def test_parse_backend_uri(self): - """ Test that parse_backend_uri works as intended. Regression for #92. """ + """Test that parse_backend_uri works as intended. 
Regression for #92.""" from caching.invalidation import parse_backend_uri - uri = 'redis://127.0.0.1:6379?socket_timeout=5' + + uri = "redis://127.0.0.1:6379?socket_timeout=5" host, params = parse_backend_uri(uri) - self.assertEqual(host, '127.0.0.1:6379') - self.assertEqual(params, {'socket_timeout': '5'}) + self.assertEqual(host, "127.0.0.1:6379") + self.assertEqual(params, {"socket_timeout": "5"}) - @mock.patch('caching.config.CACHE_INVALIDATE_ON_CREATE', 'whole-model') + @mock.patch("caching.config.CACHE_INVALIDATE_ON_CREATE", "whole-model") def test_invalidate_on_create_enabled(self): - """ Test that creating new objects invalidates cached queries for that model. """ - self.assertEqual([a.name for a in User.objects.all()], ['fliggy', 'clouseroo']) - User.objects.create(name='spam') + """Test that creating new objects invalidates cached queries for that model.""" + self.assertEqual([a.name for a in User.objects.all()], ["fliggy", "clouseroo"]) + User.objects.create(name="spam") users = User.objects.all() # our new user should show up and the query should not have come from the cache - self.assertEqual([a.name for a in users], ['fliggy', 'clouseroo', 'spam']) + self.assertEqual([a.name for a in users], ["fliggy", "clouseroo", "spam"]) self.assertFalse(any([u.from_cache for u in users])) # if we run it again, it should be cached this time users = User.objects.all() - self.assertEqual([a.name for a in users], ['fliggy', 'clouseroo', 'spam']) + self.assertEqual([a.name for a in users], ["fliggy", "clouseroo", "spam"]) self.assertTrue(all([u.from_cache for u in User.objects.all()])) - @mock.patch('caching.config.CACHE_INVALIDATE_ON_CREATE', None) + @mock.patch("caching.config.CACHE_INVALIDATE_ON_CREATE", None) def test_invalidate_on_create_disabled(self): """ Test that creating new objects does NOT invalidate cached queries when @@ -526,7 +543,7 @@ def test_invalidate_on_create_disabled(self): users = User.objects.all() self.assertTrue(users, "Can't run this test 
without some users") self.assertFalse(any([u.from_cache for u in users])) - User.objects.create(name='spam') + User.objects.create(name="spam") self.assertTrue(all([u.from_cache for u in User.objects.all()])) def test_pickle_queryset(self): @@ -540,14 +557,14 @@ def test_pickle_queryset(self): self.assertEqual(q1.timeout, DEFAULT_TIMEOUT) pickled = pickle.dumps(q1) new_timeout = object() - with mock.patch('caching.base.DEFAULT_TIMEOUT', new_timeout): + with mock.patch("caching.base.DEFAULT_TIMEOUT", new_timeout): q2 = pickle.loads(pickled) self.assertEqual(q2.timeout, new_timeout) # Make sure values other than DEFAULT_TIMEOUT remain unaffected: q1 = Addon.objects.cache(10).all() self.assertEqual(q1.timeout, 10) pickled = pickle.dumps(q1) - with mock.patch('caching.base.DEFAULT_TIMEOUT', new_timeout): + with mock.patch("caching.base.DEFAULT_TIMEOUT", new_timeout): q2 = pickle.loads(pickled) self.assertEqual(q2.timeout, 10) @@ -555,60 +572,63 @@ def test_pickle_queryset(self): # use TransactionTestCase so that ['TEST']['MIRROR'] setting works # see https://code.djangoproject.com/ticket/23718 class MultiDbTestCase(TransactionTestCase): - databases = {'default', 'primary2', 'replica', 'replica2'} - fixtures = ['tests/testapp/fixtures/testapp/test_cache.json'] - extra_apps = ['tests.testapp'] + databases = {"default", "primary2", "replica", "replica2"} + fixtures = ["tests/testapp/fixtures/testapp/test_cache.json"] + extra_apps = ["tests.testapp"] def test_multidb_cache(self): - """ Test where primary and replica DB result in two different cache keys """ + """Test where primary and replica DB result in two different cache keys""" self.assertIs(Addon.objects.get(id=1).from_cache, False) self.assertIs(Addon.objects.get(id=1).from_cache, True) - from_replica = Addon.objects.using('replica').get(id=1) + from_replica = Addon.objects.using("replica").get(id=1) self.assertIs(from_replica.from_cache, False) - self.assertEqual(from_replica._state.db, 'replica') + 
self.assertEqual(from_replica._state.db, "replica") def test_multidb_fetch_by_id(self): - """ Test where primary and replica DB result in two different cache keys with FETCH_BY_ID""" + """ + Test where primary and replica DB result in two different cache keys + with FETCH_BY_ID + """ with self.settings(FETCH_BY_ID=True): self.assertIs(Addon.objects.get(id=1).from_cache, False) self.assertIs(Addon.objects.get(id=1).from_cache, True) - from_replica = Addon.objects.using('replica').get(id=1) + from_replica = Addon.objects.using("replica").get(id=1) self.assertIs(from_replica.from_cache, False) - self.assertEqual(from_replica._state.db, 'replica') + self.assertEqual(from_replica._state.db, "replica") def test_multidb_primary_replica_invalidation(self): - """ Test saving an object on one DB invalidates it for all DBs """ - log.debug('priming the DB & cache') - primary_obj = User.objects.using('default').create(name='new-test-user') - replica_obj = User.objects.using('replica').get(name='new-test-user') + """Test saving an object on one DB invalidates it for all DBs""" + log.debug("priming the DB & cache") + primary_obj = User.objects.using("default").create(name="new-test-user") + replica_obj = User.objects.using("replica").get(name="new-test-user") self.assertIs(replica_obj.from_cache, False) - log.debug('deleting the original object') - User.objects.using('default').filter(pk=replica_obj.pk).delete() - log.debug('re-creating record with a new primary key') - primary_obj = User.objects.using('default').create(name='new-test-user') - log.debug('attempting to force re-fetch from DB (should not use cache)') - replica_obj = User.objects.using('replica').get(name='new-test-user') + log.debug("deleting the original object") + User.objects.using("default").filter(pk=replica_obj.pk).delete() + log.debug("re-creating record with a new primary key") + primary_obj = User.objects.using("default").create(name="new-test-user") + log.debug("attempting to force re-fetch from DB 
(should not use cache)") + replica_obj = User.objects.using("replica").get(name="new-test-user") self.assertIs(replica_obj.from_cache, False) self.assertEqual(replica_obj.pk, primary_obj.pk) def test_multidb_no_db_crossover(self): - """ Test no crossover of objects with identical PKs """ - primary_obj = User.objects.using('default').create(name='new-test-user') - primary_obj2 = User.objects.using('primary2').create( + """Test no crossover of objects with identical PKs""" + primary_obj = User.objects.using("default").create(name="new-test-user") + primary_obj2 = User.objects.using("primary2").create( pk=primary_obj.pk, - name='other-test-user', + name="other-test-user", ) # prime the cache for the default DB - primary_obj = User.objects.using('default').get(name='new-test-user') + primary_obj = User.objects.using("default").get(name="new-test-user") self.assertIs(primary_obj.from_cache, False) - primary_obj = User.objects.using('default').get(name='new-test-user') + primary_obj = User.objects.using("default").get(name="new-test-user") self.assertIs(primary_obj.from_cache, True) # prime the cache for the 2nd primary DB - primary_obj2 = User.objects.using('primary2').get(name='other-test-user') + primary_obj2 = User.objects.using("primary2").get(name="other-test-user") self.assertIs(primary_obj2.from_cache, False) - primary_obj2 = User.objects.using('primary2').get(name='other-test-user') + primary_obj2 = User.objects.using("primary2").get(name="other-test-user") self.assertIs(primary_obj2.from_cache, True) # ensure no crossover between databases self.assertNotEqual(primary_obj.name, primary_obj2.name) diff --git a/tests/testapp/models.py b/tests/testapp/models.py index ad29bf7..429d44e 100644 --- a/tests/testapp/models.py +++ b/tests/testapp/models.py @@ -15,21 +15,25 @@ class User(CachingMixin, models.Model): objects = CachingManager() if django.VERSION[0] >= 2: + class Meta: - # Tell Django to use this manager when resolving foreign keys. 
(Django >= 2.0) - base_manager_name = 'objects' + # Tell Django to use this manager when resolving foreign keys. + # (Django >= 2.0) + base_manager_name = "objects" class Addon(CachingMixin, models.Model): val = models.IntegerField() author1 = models.ForeignKey(User, on_delete=models.CASCADE) - author2 = models.ForeignKey(User, related_name='author2_set', on_delete=models.CASCADE) + author2 = models.ForeignKey( + User, related_name="author2_set", on_delete=models.CASCADE + ) objects = CachingManager() class Meta: # without this, Postgres & SQLite return objects in different orders: - ordering = ('pk',) + ordering = ("pk",) @cached_method def calls(self, arg=1): From 247f2597880046ed08682151c8e8e5ee7180b81b Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Wed, 6 Jul 2022 18:13:28 +0000 Subject: [PATCH 212/214] increment VERSION to 1.2.0 --- caching/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/caching/__init__.py b/caching/__init__.py index c7c45f1..ceb5c7f 100644 --- a/caching/__init__.py +++ b/caching/__init__.py @@ -1,4 +1,4 @@ from __future__ import unicode_literals -VERSION = ("1", "1", "0") +VERSION = ("1", "2", "0") __version__ = ".".join(VERSION) From 63cfbf2c530e9d95a95febce2f467dc878631046 Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Wed, 6 Jul 2022 18:25:38 +0000 Subject: [PATCH 213/214] update status badge --- README.rst | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/README.rst b/README.rst index 629b37e..4ed2704 100644 --- a/README.rst +++ b/README.rst @@ -7,11 +7,8 @@ through the ORM. For full docs, see https://cache-machine.readthedocs.org/en/latest/. -.. image:: https://travis-ci.org/django-cache-machine/django-cache-machine.svg?branch=master - :target: https://travis-ci.org/django-cache-machine/django-cache-machine - -.. 
image:: https://coveralls.io/repos/django-cache-machine/django-cache-machine/badge.svg?branch=master - :target: https://coveralls.io/r/django-cache-machine/django-cache-machine?branch=master +.. image:: https://github.com/django-cache-machine/django-cache-machine/actions/workflows/ci.yaml/badge.svg + :target: https://github.com/django-cache-machine/django-cache-machine/actions/workflows/ci.yaml Requirements From 01f12b9a4907064440dcd8b2d4b1375b26b11e4d Mon Sep 17 00:00:00 2001 From: Tobias McNulty Date: Mon, 20 Feb 2023 13:13:51 -0500 Subject: [PATCH 214/214] Update isort to fix incompatiblity with latest poetry version --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index c84aeb0..7b35366 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -16,7 +16,7 @@ repos: hooks: - id: flake8 - repo: https://github.com/pycqa/isort - rev: 5.6.4 + rev: 5.11.5 hooks: - id: isort args: ["--profile", "black", "--filter-files"]