Merged
27 commits
bfca2e2
add newer django+python to tox+travis
tobiasmcnulty Jun 3, 2016
004261f
convert assigned lambda funcs to def statements
tobiasmcnulty Jun 3, 2016
3cf88d7
Merge branch 'master' into tox-updates
tobiasmcnulty Jun 3, 2016
23b28c6
fix flake8 errors
tobiasmcnulty Jun 3, 2016
f12a36c
make travis use tox
tobiasmcnulty Jun 3, 2016
d6c527e
specify only one python since tox does that now
tobiasmcnulty Jun 3, 2016
aed2def
pass TRAVIS environment variable into tox environment
tobiasmcnulty Jun 3, 2016
615585b
don't run flake8 as part of travis, use tox
tobiasmcnulty Jun 4, 2016
dcc81ab
Update tox and travis
vkurup Oct 7, 2017
2668094
Include flake8 on travis
vkurup Oct 7, 2017
d450cff
Workaround Travis bug
vkurup Oct 7, 2017
77d21e8
Try to get coverage working
vkurup Oct 7, 2017
79281be
Remove support for Python < 2.7, Django < 1.8
vkurup Oct 7, 2017
36589e9
Remove obsolete package
vkurup Oct 7, 2017
f6839e4
Django 1.9 support
vkurup Oct 7, 2017
3b562d4
Django 1.10 support
vkurup Oct 7, 2017
b7295ff
Tell Travis to test 1.10
vkurup Oct 7, 2017
6a547d8
Django 1.11 support
vkurup Oct 7, 2017
7bd4f77
Cleanup and docs updates
vkurup Oct 7, 2017
24c9779
Add tests for failure when running .values() or .values_list()
vkurup Oct 8, 2017
f98e32f
Typo
vkurup Oct 9, 2017
4f1fb8e
Attempt to tease out Django 1.8 differences
vkurup Oct 11, 2017
085bf35
enable --keepdb for faster test runs
tobiasmcnulty Oct 12, 2017
52d4ea6
try to simplify Django 1.8/Django 1.11 compatibility
tobiasmcnulty Oct 12, 2017
c220d61
Merge pull request #1 from tobiasmcnulty/django-upgrades
vkurup Oct 13, 2017
096f2b2
Minor doc updates
vkurup Oct 13, 2017
251e5e6
Bump version for release
vkurup Oct 13, 2017
40 changes: 14 additions & 26 deletions .travis.yml
@@ -2,44 +2,32 @@ language: python
services:
- memcached
- redis-server
# Use Travis' build matrix and exclude functions rather than running tox
# directly so that we can run the builds in parallel and get coverage reports
# for each Python/Django version combo
python:
- "2.6"
- "2.7"
- "3.3"
- "3.4"
# python selected by tox, so specify only one version here
- "3.6"
addons:
postgresql: "9.4"
postgresql: "9.5"
before_install:
# work around https://github.com/travis-ci/travis-ci/issues/8363
- pyenv global system 3.5
before_script:
- psql -c 'create database travis_ci_test;' -U postgres
- psql -c 'create database travis_ci_test2;' -U postgres
install:
- pip install -U pip # make sure we have the latest version
- pip install -e .
- pip install -r requirements/py`echo $TRAVIS_PYTHON_VERSION|cut -d'.' -f1`.txt "$DJANGO_SPEC"
- pip install tox
- pip install coveralls
script:
- python run_tests.py --with-coverage
- flake8 --ignore=E731,E402 .
- tox -e $TOX_ENV
after_success:
- coveralls
env:
- DJANGO_SPEC="Django>=1.4,<1.5"
- DJANGO_SPEC="Django>=1.5,<1.6"
- DJANGO_SPEC="Django>=1.6,<1.7"
- DJANGO_SPEC="Django>=1.7,<1.8"
- DJANGO_SPEC="Django>=1.8,<1.9"
matrix:
exclude:
- python: "2.6"
env: DJANGO_SPEC="Django>=1.7,<1.8"
- python: "2.6"
env: DJANGO_SPEC="Django>=1.8,<1.9"
- python: "3.3"
env: DJANGO_SPEC="Django>=1.4,<1.5"
- python: "3.4"
env: DJANGO_SPEC="Django>=1.4,<1.5"
- TOX_ENV="dj18-py27,dj18-py34,dj18-py35"
- TOX_ENV="dj19-py27,dj19-py34,dj19-py35,dj19-py36"
- TOX_ENV="dj110-py27,dj110-py34,dj110-py35,dj110-py36"
- TOX_ENV="dj111-py27,dj111-py34,dj111-py35,dj111-py36"
- TOX_ENV="py27-flake8,py36-flake8"
- TOX_ENV="docs"
# Adding sudo: False tells Travis to use their container-based infrastructure, which is somewhat faster.
sudo: False
6 changes: 1 addition & 5 deletions README.rst
@@ -17,7 +17,7 @@ For full docs, see https://cache-machine.readthedocs.org/en/latest/.
Requirements
------------

Cache Machine works with Django 1.4-1.8 and Python 2.6, 2.7, 3.3 and 3.4.
Cache Machine works with Django 1.8-1.11 and Python 2.7, 3.4, 3.5 and 3.6.


Installation
@@ -27,10 +27,6 @@ Get it from `pypi <http://pypi.python.org/pypi/django-cache-machine>`_::

pip install django-cache-machine

or `github <http://github.com/django-cache-machine/django-cache-machine>`_::

pip install -e git://github.com/django-cache-machine/django-cache-machine.git#egg=django-cache-machine


Running Tests
-------------
2 changes: 1 addition & 1 deletion caching/__init__.py
@@ -1,4 +1,4 @@
from __future__ import unicode_literals

VERSION = ('0', '9', '1')
VERSION = ('1', '0', '0')
__version__ = '.'.join(VERSION)
Empty file removed caching/backends/__init__.py
Empty file.
43 changes: 0 additions & 43 deletions caching/backends/locmem.py

This file was deleted.

32 changes: 0 additions & 32 deletions caching/backends/memcached.py

This file was deleted.

125 changes: 65 additions & 60 deletions caching/base.py
@@ -3,25 +3,29 @@
import functools
import logging

import django
from django.core.cache.backends.base import DEFAULT_TIMEOUT
from django.db import models
from django.db.models import signals
from django.db.models.sql import query, EmptyResultSet
from django.utils import encoding

from caching import config
from .compat import DEFAULT_TIMEOUT
from .invalidation import invalidator, flush_key, make_key, byid, cache
from caching.invalidation import invalidator, flush_key, make_key, byid, cache

try:
# ModelIterable is defined in Django 1.9+, and if it's present, we use it
# to iterate over our results.
from django.db.models.query import ModelIterable
except ImportError:
# If not, define a Django 1.8-compatible stub we can use instead.
class ModelIterable(object):
def __init__(self, queryset):
self.queryset = queryset

class NullHandler(logging.Handler):

def emit(self, record):
pass

def __iter__(self):
return super(CachingQuerySet, self.queryset).iterator()

log = logging.getLogger('caching')
log.addHandler(NullHandler())


class CachingManager(models.Manager):
@@ -32,9 +36,6 @@ class CachingManager(models.Manager):
def get_queryset(self):
return CachingQuerySet(self.model, using=self._db)

if django.VERSION < (1, 6):
get_query_set = get_queryset

def contribute_to_class(self, cls, name):
signals.post_save.connect(self.post_save, sender=cls)
signals.post_delete.connect(self.post_delete, sender=cls)
@@ -62,20 +63,20 @@ def no_cache(self):
return self.cache(config.NO_CACHE)


class CacheMachine(object):
class CachingModelIterable(ModelIterable):
"""
Handles all the cache management for a QuerySet.

Takes the string representation of a query and a function that can be
called to get an iterator over some database results.
Takes a queryset, and optionally takes a function that can be called to
get an iterator over some database results. The function is only needed
for RawQuerySets currently.
"""

def __init__(self, model, query_string, iter_function, timeout=DEFAULT_TIMEOUT, db='default'):
self.model = model
self.query_string = query_string
self.iter_function = iter_function
self.timeout = timeout
self.db = db
def __init__(self, queryset, *args, **kwargs):
self.iter_function = kwargs.pop('iter_function', None)
self.timeout = kwargs.pop('timeout', queryset.timeout)
self.db = kwargs.pop('db', queryset.db)
super(CachingModelIterable, self).__init__(queryset, *args, **kwargs)

def query_key(self):
"""
@@ -86,16 +87,38 @@ def query_key(self):
master), throwing a Django ValueError in the process. Django prevents
cross DB model saving among related objects.
"""
query_db_string = 'qs:%s::db:%s' % (self.query_string, self.db)
query_db_string = 'qs:%s::db:%s' % (self.queryset.query_key(), self.db)
return make_key(query_db_string, with_locale=False)

def cache_objects(self, objects, query_key):
"""Cache query_key => objects, then update the flush lists."""
log.debug('query_key: %s' % query_key)
query_flush = flush_key(self.queryset.query_key())
log.debug('query_flush: %s' % query_flush)
cache.add(query_key, objects, timeout=self.timeout)
invalidator.cache_objects(self.queryset.model, objects, query_key, query_flush)

def __iter__(self):
if self.iter_function is not None:
# This is a RawQuerySet. Use the function passed into
# the class constructor.
iterator = self.iter_function
else:
# Otherwise, use super().__iter__.
iterator = super(CachingModelIterable, self).__iter__

if self.timeout == config.NO_CACHE:
# no cache, just iterate and return the results
for obj in iterator():
yield obj
return

# Try to fetch from the cache.
try:
query_key = self.query_key()
except query.EmptyResultSet:
raise StopIteration
return

# Try to fetch from the cache.
cached = cache.get(query_key)
if cached is not None:
log.debug('cache hit: %s' % query_key)
@@ -104,28 +127,19 @@ def __iter__(self):
yield obj
return

# Do the database query, cache it once we have all the objects.
iterator = self.iter_function()
# Use the special FETCH_BY_ID iterator if configured.
if config.FETCH_BY_ID and hasattr(self.queryset, 'fetch_by_id'):
iterator = self.queryset.fetch_by_id

# No cached results. Do the database query, and cache it once we have
# all the objects.
to_cache = []
try:
while True:
obj = next(iterator)
obj.from_cache = False
to_cache.append(obj)
yield obj
except StopIteration:
if to_cache or config.CACHE_EMPTY_QUERYSETS:
self.cache_objects(to_cache, query_key)
raise
Member:

This change loses the StopIteration re-raised here -- is that intentional?

Contributor Author:

Ahh... I think I missed that (I thought I was just simplifying an iteration through an iterator). I'll have to see if I can find a way to test that, or maybe just revert back to the old version of the code.

Contributor Author:

Actually, this might be a part of Python that I just don't understand well. I'd think that I could just fix this by adding a raise StopIteration on line 128 (at the same indentation level as the if on line 126). But that seems superfluous, since it's the last line of the method and the iteration will end anyway. Am I missing something?
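
To illustrate the behavior discussed in this thread (an editorial sketch, not part of the diff): a generator function ends iteration simply by returning, so an explicit raise StopIteration at the end of the generator body is superfluous. Under PEP 479, the default from Python 3.7, a StopIteration raised inside a generator body is converted to a RuntimeError rather than silently ending iteration, so dropping the re-raise is also the more future-proof choice.

```python
def without_explicit_raise():
    # Returning from a generator ends iteration on its own; the caller
    # receives StopIteration automatically via the iterator protocol.
    yield 1
    yield 2


def with_explicit_raise():
    yield 1
    yield 2
    # Superfluous: the generator would end here anyway. Under PEP 479
    # (default from Python 3.7) this raise becomes a RuntimeError.
    raise StopIteration


print(list(without_explicit_raise()))  # [1, 2]

try:
    print(list(with_explicit_raise()))  # [1, 2] on Python 2.7-3.6
except RuntimeError as exc:
    print('PEP 479 in effect:', exc)  # Python 3.7+
```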


def cache_objects(self, objects, query_key):
"""Cache query_key => objects, then update the flush lists."""
log.debug('query_key: %s' % query_key)
query_flush = flush_key(self.query_string)
log.debug('query_flush: %s' % query_flush)
cache.add(query_key, objects, timeout=self.timeout)
invalidator.cache_objects(self.model, objects, query_key, query_flush)
for obj in iterator():
obj.from_cache = False
to_cache.append(obj)
yield obj
if to_cache or config.CACHE_EMPTY_QUERYSETS:
self.cache_objects(to_cache, query_key)


class CachingQuerySet(models.query.QuerySet):
@@ -135,6 +149,7 @@ class CachingQuerySet(models.query.QuerySet):
def __init__(self, *args, **kw):
super(CachingQuerySet, self).__init__(*args, **kw)
self.timeout = DEFAULT_TIMEOUT
self._iterable_class = CachingModelIterable

def __getstate__(self):
"""
@@ -163,18 +178,7 @@ def query_key(self):
return sql % params

def iterator(self):
iterator = super(CachingQuerySet, self).iterator
if self.timeout == config.NO_CACHE:
return iter(iterator())
else:
try:
# Work-around for Django #12717.
query_string = self.query_key()
except query.EmptyResultSet:
return iterator()
if config.FETCH_BY_ID:
iterator = self.fetch_by_id
return iter(CacheMachine(self.model, query_string, iterator, self.timeout, db=self.db))
return self._iterable_class(self)

def fetch_by_id(self):
"""
@@ -309,10 +313,11 @@ def __iter__(self):
while True:
yield next(iterator)
else:
sql = self.raw_query % tuple(self.params)
for obj in CacheMachine(self.model, sql, iterator, timeout=self.timeout):
for obj in CachingModelIterable(self, iter_function=iterator, timeout=self.timeout):
yield obj
raise StopIteration

def query_key(self):
return self.raw_query % tuple(self.params)


def _function_cache_key(key):
@@ -388,7 +393,7 @@ def __init__(self, obj, func):
self.cache = {}

def __call__(self, *args, **kwargs):
k = lambda o: o.cache_key if hasattr(o, 'cache_key') else o
def k(o): return o.cache_key if hasattr(o, 'cache_key') else o
arg_keys = list(map(k, args))
kwarg_keys = [(key, k(val)) for key, val in list(kwargs.items())]
key_parts = ('m', self.obj.cache_key, self.func.__name__,
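For orientation, here is a minimal usage sketch of the pattern that the rewritten caching/base.py implements, following the project's documented CachingMixin/CachingManager usage. The Zomg model and its field are illustrative only, and the snippet assumes a Django project configured with one of cache-machine's supported cache backends.

```python
from django.db import models

from caching.base import CachingManager, CachingMixin


class Zomg(CachingMixin, models.Model):
    # Illustrative model; any concrete model using CachingMixin +
    # CachingManager gets the same caching behavior.
    val = models.IntegerField()

    objects = CachingManager()


# First evaluation runs the SQL and caches the result set via
# CachingModelIterable.cache_objects; objects loaded from the database
# carry from_cache = False.
first = list(Zomg.objects.filter(val=42))

# An identical queryset is then served from the cache, with objects
# flagged from_cache = True. Saving or deleting a Zomg instance
# invalidates the cached querysets it appears in, via the post_save /
# post_delete signals connected by CachingManager above.
second = list(Zomg.objects.filter(val=42))
```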
14 changes: 0 additions & 14 deletions caching/compat.py

This file was deleted.

2 changes: 1 addition & 1 deletion caching/ext.py
@@ -69,7 +69,7 @@ def process_cache_arguments(self, args):

def _cache_support(self, name, obj, timeout, extra, caller):
"""Cache helper callback."""
if settings.TEMPLATE_DEBUG:
if settings.DEBUG:
return caller()
extra = ':'.join(map(encoding.smart_str, extra))
key = 'fragment:%s:%s' % (name, extra)