Unverified commit e76d9c10 authored by Sean Bleier, committed by GitHub

Merge pull request #162 from sebleier/unstable

Update to support Redis-py 3.x
parents 777d4d55 1f815e1f
......@@ -19,3 +19,4 @@ MANIFEST
.venv
redis/
*/_build/
build/*
language: python
python:
- 2.7
- 3.4
- 3.5
- 3.6
env:
......@@ -14,8 +13,6 @@ matrix:
env: DJANGO_VERSION='>=2.0,<2.1'
- python: 2.7
env: DJANGO_VERSION='>=2.1,<2.2'
- python: 3.4
env: DJANGO_VERSION='>=2.1,<2.2'
# command to run tests
install: ./install_redis.sh
script: make test DJANGO_VERSION=$DJANGO_VERSION
......
......@@ -21,6 +21,16 @@ Docs can be found at http://django-redis-cache.readthedocs.org/en/latest/.
Changelog
=========
2.0.0
-----
* Adds support for redis-py >= 3.0.
* Drops support for Redis 2.6.
* Drops support for Python 3.4.
* Removes custom ``expire`` method in lieu of Django's ``touch``.
* Removes ``CacheKey`` in favor of string literals.
1.8.0
-----
......
......@@ -93,6 +93,13 @@ Standard Django Cache API
:param version: Version of key
:type version: Integer or None
.. function:: touch(self, key, timeout):
Updates the timeout on a key.
:param key: Location of the value
:rtype: bool
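The ``touch`` method replaces the old custom ``expire`` helper with Django's standard API; a minimal usage sketch (key names and timeouts are illustrative):

    from django.core.cache import cache

    cache.set("report", {"rows": 10}, timeout=30)

    # django-redis-cache < 2.0 used a custom method, removed in 2.0.0:
    # cache.expire("report", 60)

    # django-redis-cache >= 2.0 relies on Django's standard touch():
    cache.touch("report", 60)       # True: key existed, timeout updated
    cache.touch("missing-key", 60)  # False: key does not exist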
Cache Methods Provided by django-redis-cache
......@@ -124,14 +131,22 @@ Cache Methods Provided by django-redis-cache
:param version: Version of the keys
.. function:: get_or_set(self, key, func[, timeout=None]):
.. function:: get_or_set(self, key, func[, timeout=None, lock_timeout=None, stale_cache_timeout=None]):
Get a value from the cache or call ``func`` to set it and return it.
Retrieves a key value from the cache and sets the value if it does not exist.
This implementation is slightly more advanced than Django's. It provides thundering herd
protection, which prevents multiple threads/processes from calling the value-generating
function at the same time.
:param key: Location of the value
:param func: Callable used to set the value if key does not exist.
:param timeout: Number of seconds to hold value in cache.
:param timeout: Time in seconds that value at key is considered fresh.
:type timeout: Number of seconds or None
:param lock_timeout: Time in seconds that the lock will stay active and prevent other threads from acquiring the lock.
:type lock_timeout: Number of seconds or None
:param stale_cache_timeout: Time in seconds that the stale cache will remain after the key has expired. If ``None`` is specified, the stale value will remain indefinitely.
:type stale_cache_timeout: Number of seconds or None
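A usage sketch of ``get_or_set`` with the herd-protection parameters described above; the key, callable, and timings are illustrative:

    from django.core.cache import cache

    def compute_report():
        # Stand-in for an expensive query or remote call.
        return {"rows": 1000}

    report = cache.get_or_set(
        "daily-report",
        compute_report,
        timeout=300,             # value is considered fresh for 5 minutes
        lock_timeout=30,         # only one caller recomputes the value at a time
        stale_cache_timeout=60,  # others are served the stale value for up to a minute
    )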
.. function:: reinsert_keys(self):
......@@ -147,12 +162,9 @@ Cache Methods Provided by django-redis-cache
:param key: Location of the value
:rtype: bool
.. function:: lock(self, key, timeout=None, sleep=0.1, blocking_timeout=None, thread_local=True)
.. function:: expire(self, key, timeout):
Set the expire time on a key
:param key: Location of the value
:rtype: bool
See docs for `redis-py`_.
.. _redis-py: https://redis-py.readthedocs.io/en/latest/_modules/redis/client.html#Redis.lock
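Since ``lock`` defers to redis-py, the returned lock can be used as a context manager; a brief sketch, where ``sync_inventory`` is a hypothetical task standing in for the critical section:

    from django.core.cache import cache

    # The redis-py Lock supports the context-manager protocol:
    # it is acquired on entry and released on exit.
    with cache.lock("inventory-sync", timeout=10, blocking_timeout=2):
        sync_inventory()  # hypothetical; at most one holder runs this at a time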
......@@ -4,7 +4,7 @@ Intro and Quick Start
Intro
=====
`django-redis-cache`_ is a cache backend for the `Django`_ webframework. It
`django-redis-cache`_ is a cache backend for the `Django`_ web framework. It
uses the `redis`_ server, which is an in-memory key-value data structure server.
Similar to the great `Memcached`_ in performance, it has several features that
make it more appealing.
......@@ -24,7 +24,7 @@ makes it more appealing.
* Many more.
Many of these features are irrelevant to caching, but can be used by other
areas of a web stack and therefore offer a compelling case to simplify your
areas of a web stack and therefore offers a compelling case to simplify your
infrastructure.
......@@ -35,9 +35,9 @@ Quick Start
**Recommended:**
* `redis`_ >= 2.4
* `redis`_ >= 2.8
* `redis-py`_ >= 2.10.3
* `redis-py`_ >= 3.0.0
* `python`_ >= 2.7
......@@ -59,7 +59,7 @@ of redis. Start the server by running ``./src/redis-server``
}
**Warning: By default, django-redis-cache sets keys in database 1 of Redis, while a redis-cli session starts on database 0. Switch to database 1 with** ``SELECT 1``.
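The ``CACHES`` block above is truncated in this hunk; the sketch below shows the kind of configuration the quick start describes, with the backend path from this project and an assumed ``DB`` option matching the warning about database 1:

    # settings.py (illustrative)
    CACHES = {
        "default": {
            "BACKEND": "redis_cache.RedisCache",
            "LOCATION": "localhost:6379",
            "OPTIONS": {
                "DB": 1,  # keys land in Redis database 1, per the warning above
            },
        },
    }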
.. _Django: https://www.djangoproject.com/
.. _django-redis-cache: http://github.com/sebleier/django-redis-cache
.. _redis-py: http://github.com/andymccurdy/redis-py/
......
......@@ -29,9 +29,8 @@ class ShardedRedisCache(BaseRedisCache):
"""
clients = defaultdict(list)
for key in keys:
clients[self.get_client(key, write)].append(
self.make_key(key, version)
)
versioned_key = self.make_key(key, version=version)
clients[self.get_client(versioned_key, write)].append(versioned_key)
return clients
####################
......@@ -63,41 +62,28 @@ class ShardedRedisCache(BaseRedisCache):
data = {}
clients = self.shard(keys, version=version)
for client, versioned_keys in clients.items():
original_keys = [key._original_key for key in versioned_keys]
versioned_keys = [self.make_key(key, version=version) for key in keys]
data.update(
self._get_many(
client,
original_keys,
versioned_keys=versioned_keys
)
self._get_many(client, keys, versioned_keys=versioned_keys)
)
return data
def set_many(self, data, timeout=DEFAULT_TIMEOUT, version=None):
"""
Set a bunch of values in the cache at once from a dict of key/value
pairs. This is much more efficient than calling set() multiple times.
Set multiple values in the cache at once from a dict of key/value pairs.
If timeout is given, that timeout will be used for the key; otherwise
the default cache timeout will be used.
"""
timeout = self.get_timeout(timeout)
versioned_key_to_key = {self.make_key(key, version=version): key for key in data.keys()}
clients = self.shard(versioned_key_to_key.values(), write=True, version=version)
clients = self.shard(data.keys(), write=True, version=version)
if timeout is None:
for client, keys in clients.items():
subset = {}
for key in keys:
subset[key] = self.prep_value(data[key._original_key])
self._set_many(client, subset)
return
for client, keys in clients.items():
for client, versioned_keys in clients.items():
pipeline = client.pipeline()
for key in keys:
value = self.prep_value(data[key._original_key])
self._set(pipeline, key, value, timeout)
for versioned_key in versioned_keys:
value = self.prep_value(data[versioned_key_to_key[versioned_key]])
self._set(pipeline, versioned_key, value, timeout)
pipeline.execute()
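The caller-facing behavior of ``set_many`` is the standard Django cache API; a brief usage sketch for reference:

    from django.core.cache import cache

    # In the sharded backend, keys are grouped per shard and written
    # through one pipeline per client.
    cache.set_many({"a": 1, "b": 2, "c": 3}, timeout=60)
    cache.get_many(["a", "b", "c"])  # {"a": 1, "b": 2, "c": 3}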
def incr_version(self, key, delta=1, version=None):
......@@ -113,7 +99,7 @@ class ShardedRedisCache(BaseRedisCache):
old = self.make_key(key, version=version)
new = self.make_key(key, version=version + delta)
return self._incr_version(client, old, new, delta, version)
return self._incr_version(client, old, new, key, delta, version)
#####################
# Extra api methods #
......
......@@ -56,25 +56,18 @@ class RedisCache(BaseRedisCache):
def set_many(self, data, timeout=DEFAULT_TIMEOUT, version=None):
"""
Set a bunch of values in the cache at once from a dict of key/value
pairs. This is much more efficient than calling set() multiple times.
Set multiple values in the cache at once from a dict of key/value pairs.
If timeout is given, that timeout will be used for the key; otherwise
the default cache timeout will be used.
"""
timeout = self.get_timeout(timeout)
versioned_keys = self.make_keys(data.keys(), version=version)
if timeout is None:
new_data = {}
for key in versioned_keys:
new_data[key] = self.prep_value(data[key._original_key])
return self._set_many(self.master_client, new_data)
pipeline = self.master_client.pipeline()
for key in versioned_keys:
value = self.prep_value(data[key._original_key])
self._set(pipeline, key, value, timeout)
for key, value in data.items():
value = self.prep_value(value)
versioned_key = self.make_key(key, version=version)
self._set(pipeline, versioned_key, value, timeout)
pipeline.execute()
def incr_version(self, key, delta=1, version=None):
......@@ -89,7 +82,7 @@ class RedisCache(BaseRedisCache):
old = self.make_key(key, version)
new = self.make_key(key, version=version + delta)
return self._incr_version(self.master_client, old, new, delta, version)
return self._incr_version(self.master_client, old, new, key, delta, version)
#####################
# Extra api methods #
......
KEY_EXPIRED = -2
KEY_NON_VOLATILE = -1
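These constants mirror the return codes of the Redis ``TTL`` command: -2 for a key that does not exist and -1 for a key with no expiry. A quick illustration with redis-py, assuming a local server:

    import redis

    r = redis.Redis()              # assumes a Redis server on localhost:6379
    r.set("volatile", "x", ex=30)
    r.set("permanent", "y")

    r.ttl("volatile")   # ~30 seconds remaining
    r.ttl("permanent")  # -1 -> KEY_NON_VOLATILE
    r.ttl("missing")    # -2 -> KEY_EXPIRED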
......@@ -6,30 +6,10 @@ from django.utils import six
from django.utils.encoding import force_text, python_2_unicode_compatible
from django.utils.six.moves.urllib.parse import parse_qs, urlparse
from redis._compat import unicode
from redis.connection import SSLConnection
@python_2_unicode_compatible
class CacheKey(object):
"""
A stub string class that we can use to check if a key was created already.
"""
def __init__(self, key, versioned_key):
self._original_key = key
self._versioned_key = versioned_key
def __eq__(self, other):
return self._versioned_key == other
def __str__(self):
return force_text(self._versioned_key)
def __hash__(self):
return hash(self._versioned_key)
__repr__ = __str__
def get_servers(location):
"""Returns a list of servers given the server argument passed in from
Django.
......
......@@ -2,4 +2,4 @@ hiredis==0.2.0
django-nose==1.4.4
nose==1.3.6
msgpack-python==0.4.6
pyyaml==3.11
pyyaml>=4.2b1
......@@ -5,11 +5,11 @@ setup(
url="http://github.com/sebleier/django-redis-cache/",
author="Sean Bleier",
author_email="sebleier@gmail.com",
version="1.8.1",
version="2.0.0",
license="BSD",
packages=["redis_cache", "redis_cache.backends"],
description="Redis Cache Backend for Django",
install_requires=['redis==2.10.6'],
install_requires=['redis<4.0'],
classifiers=[
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
......
......@@ -4,6 +4,7 @@ from __future__ import unicode_literals
from hashlib import sha1
import os
import subprocess
import threading
import time
......@@ -21,6 +22,7 @@ import redis
from tests.testapp.models import Poll, expensive_calculation
from redis_cache.cache import RedisCache, pool
from redis_cache.constants import KEY_EXPIRED, KEY_NON_VOLATILE
from redis_cache.utils import get_servers, parse_connection_kwargs
......@@ -299,13 +301,12 @@ class BaseRedisTestCase(SetupMixin):
def test_set_expiration_timeout_zero(self):
key, value = self.cache.make_key('key'), 'value'
self.cache.set(key, value, timeout=0)
self.assertIsNone(self.cache.get_client(key).ttl(key))
self.assertIn(key, self.cache)
self.assertEqual(self.cache.get_client(key).ttl(key), KEY_EXPIRED)
self.assertNotIn(key, self.cache)
def test_set_expiration_timeout_negative(self):
key, value = self.cache.make_key('key'), 'value'
self.cache.set(key, value, timeout=-1)
self.assertIsNone(self.cache.get_client(key).ttl(key))
self.assertNotIn(key, self.cache)
def test_unicode(self):
......@@ -481,9 +482,9 @@ class BaseRedisTestCase(SetupMixin):
self.cache.set('b', 'b', 5)
self.cache.reinsert_keys()
self.assertEqual(self.cache.get('a'), 'a')
self.assertGreater(self.cache.get_client('a').ttl(self.cache.make_key('a')), 1)
self.assertGreater(self.cache.ttl('a'), 1)
self.assertEqual(self.cache.get('b'), 'b')
self.assertGreater(self.cache.get_client('b').ttl(self.cache.make_key('b')), 1)
self.assertGreater(self.cache.ttl('a'), 1)
def test_get_or_set(self):
......@@ -510,6 +511,66 @@ class BaseRedisTestCase(SetupMixin):
self.assertEqual(expensive_function.num_calls, 2)
self.assertEqual(value, 42)
def test_get_or_set_serving_from_stale_value(self):
def expensive_function(x):
time.sleep(.5)
expensive_function.num_calls += 1
return x
expensive_function.num_calls = 0
self.assertEqual(expensive_function.num_calls, 0)
results = {}
def thread_worker(thread_id, return_value, timeout, lock_timeout, stale_cache_timeout):
value = self.cache.get_or_set(
'key',
lambda: expensive_function(return_value),
timeout,
lock_timeout,
stale_cache_timeout
)
results[thread_id] = value
thread_0 = threading.Thread(target=thread_worker, args=(0, 'a', 1, None, 1))
thread_1 = threading.Thread(target=thread_worker, args=(1, 'b', 1, None, 1))
thread_2 = threading.Thread(target=thread_worker, args=(2, 'c', 1, None, 1))
thread_3 = threading.Thread(target=thread_worker, args=(3, 'd', 1, None, 1))
thread_4 = threading.Thread(target=thread_worker, args=(4, 'e', 1, None, 1))
# First thread should complete and return its value
thread_0.start() # t = 0, valid from t = .5 - 1.5, stale from t = 1.5 - 2.5
# Second thread will start while the first thread is still working and return None.
time.sleep(.25) # t = .25
thread_1.start()
# Third thread will start after the first value is computed, but before it expires, so it returns
# its value.
time.sleep(.5) # t = .75
thread_2.start()
# Fourth thread will start after the first value has expired and will re-compute its value.
# valid from t = 2.25 - 3.25, stale from t = 3.75 - 4.75.
time.sleep(1) # t = 1.75
thread_3.start()
# Fifth thread will start after the fourth thread has started to compute its value, but
# before the first thread's stale cache has expired.
time.sleep(.25) # t = 2
thread_4.start()
thread_0.join()
thread_1.join()
thread_2.join()
thread_3.join()
thread_4.join()
self.assertEqual(results, {
0: 'a',
1: None,
2: 'a',
3: 'd',
4: 'a'
})
def assertMaxConnection(self, cache, max_num):
for client in cache.clients.values():
self.assertLessEqual(client.connection_pool._created_connections, max_num)
......@@ -581,21 +642,21 @@ class BaseRedisTestCase(SetupMixin):
self.cache.persist('a')
self.assertIsNone(self.cache.ttl('a'))
def test_expire_no_expiry_to_expire(self):
def test_touch_no_expiry_to_expire(self):
self.cache.set('a', 'a', timeout=None)
self.cache.expire('a', 10)
self.cache.touch('a', 10)
ttl = self.cache.ttl('a')
self.assertAlmostEqual(ttl, 10)
def test_expire_less(self):
def test_touch_less(self):
self.cache.set('a', 'a', timeout=20)
self.cache.expire('a', 10)
self.cache.touch('a', 10)
ttl = self.cache.ttl('a')
self.assertAlmostEqual(ttl, 10)
def test_expire_more(self):
def test_touch_more(self):
self.cache.set('a', 'a', timeout=10)
self.cache.expire('a', 20)
self.cache.touch('a', 20)
ttl = self.cache.ttl('a')
self.assertAlmostEqual(ttl, 20)
......