Commit 238e9b55 authored by Ales Zoulek

merge

parents e05a534e 19415eac
@@ -6,3 +6,5 @@ Noah Kantrowitz / coderanger <http://github.com/coderanger>
 Martin Mahner / bartTC <http://github.com/bartTC>
 Timothée Peignier / cyberdelia <https://github.com/cyberdelia>
 Lior Sion / liorsion <https://github.com/liorsion>
+Ales Zoulek / aleszoulek <https://github.com/aleszoulek>
+James Aylett / jaylett <https://github.com/jaylett>
@@ -15,7 +15,6 @@ except ImportError:
         "Redis cache backend requires the 'redis-py' library")

 class CacheKey(object):
     """
     A stub string class that we can use to check if a key was created already.
@@ -29,6 +28,9 @@ class CacheKey(object):
     def __str__(self):
         return self.__unicode__()

+    def __repr__(self):
+        return self.__unicode__()
+
     def __unicode__(self):
         return smart_str(self._key)
@@ -38,7 +40,11 @@ class CacheClass(BaseCache):
         """
         Connect to Redis, and set up cache backend.
         """
+        self._init(server, params)
+
+    def _init(self, server, params):
         super(CacheClass, self).__init__(params)
+        self._initargs = { 'server': server, 'params': params }
         options = params.get('OPTIONS', {})
         password = params.get('password', options.get('PASSWORD', None))
         db = params.get('db', options.get('DB', 1))
@@ -55,8 +61,13 @@ class CacheClass(BaseCache):
         else:
             host = server or 'localhost'
             port = 6379
-        self._cache = redis.Redis(host=host, port=port, db=db, password=password)
+        self._client = redis.Redis(host=host, port=port, db=db, password=password)
+
+    def __getstate__(self):
+        return self._initargs
+
+    def __setstate__(self, state):
+        self._init(**state)

     def make_key(self, key, version=None):
         """
@@ -74,7 +85,7 @@ class CacheClass(BaseCache):
         Returns ``True`` if the object was added, ``False`` if not.
         """
         key = self.make_key(key, version=version)
-        if self._cache.exists(key):
+        if self._client.exists(key):
             return False
         return self.set(key, value, timeout)
@@ -85,23 +96,31 @@ class CacheClass(BaseCache):
         Returns unpickled value if key is found, the default if not.
         """
         key = self.make_key(key, version=version)
-        value = self._cache.get(key)
+        value = self._client.get(key)
         if value is None:
             return default
-        return self.unpickle(value)
+        try:
+            result = int(value)
+        except (ValueError, TypeError):
+            result = self.unpickle(value)
+        return result

     def set(self, key, value, timeout=None, version=None, client=None):
         """
         Persist a value to the cache, and set an optional expiration time.
         """
         if not client:
-            client = self._cache
+            client = self._client
         key = self.make_key(key, version=version)
-        # get timout
         if not timeout:
             timeout = self.default_timeout
-        # store the pickled value
-        result = client.setex(key, pickle.dumps(value), int(timeout))
+        try:
+            value = int(value)
+        except (ValueError, TypeError):
+            result = client.setex(key, pickle.dumps(value), int(timeout))
+        else:
+            result = client.setex(key, value, int(timeout))
         # result is a boolean
         return result
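With this hunk, get() and set() special-case integers: an int is written to Redis as a plain string rather than a pickle, so the server-side INCR command can operate on it, and on read anything that parses as an int comes back as an int while everything else is unpickled. A short sketch of the resulting behaviour, assuming `cache` is an instance of this backend:

    cache.set('hits', 10)             # stored as the raw string "10", with a TTL
    cache.set('name', 'spam')         # non-integers are still pickled before setex
    assert cache.get('hits') == 10    # "10" parses as int, no unpickling needed
    assert cache.get('name') == 'spam'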
@@ -109,7 +128,7 @@ class CacheClass(BaseCache):
         """
         Remove a key from the cache.
         """
-        self._cache.delete(self.make_key(key, version=version))
+        self._client.delete(self.make_key(key, version=version))

     def delete_many(self, keys, version=None):
         """
@@ -117,14 +136,14 @@ class CacheClass(BaseCache):
         """
         if keys:
             keys = map(lambda key: self.make_key(key, version=version), keys)
-            self._cache.delete(*keys)
+            self._client.delete(*keys)

     def clear(self):
         """
         Flush all cache keys.
         """
         # TODO : potential data loss here, should we only delete keys based on the correct version ?
-        self._cache.flushdb()
+        self._client.flushdb()

     def unpickle(self, value):
         """
@@ -142,11 +161,14 @@ class CacheClass(BaseCache):
         recovered_data = SortedDict()
         new_keys = map(lambda key: self.make_key(key, version=version), keys)
         map_keys = dict(zip(new_keys, keys))
-        results = self._cache.mget(new_keys)
+        results = self._client.mget(new_keys)
         for key, value in zip(new_keys, results):
             if value is None:
                 continue
-            value = self.unpickle(value)
+            try:
+                value = int(value)
+            except (ValueError, TypeError):
+                value = self.unpickle(value)
             if isinstance(value, basestring):
                 value = smart_unicode(value)
             recovered_data[map_keys[key]] = value
@@ -160,11 +182,27 @@ class CacheClass(BaseCache):
         If timeout is given, that timeout will be used for the key; otherwise
         the default cache timeout will be used.
         """
-        pipeline = self._cache.pipeline()
+        pipeline = self._client.pipeline()
         for key, value in data.iteritems():
             self.set(key, value, timeout, version=version, client=pipeline)
         pipeline.execute()

+    def incr(self, key, delta=1, version=None):
+        """
+        Add delta to value in the cache. If the key does not exist, raise a
+        ValueError exception.
+        """
+        key = self.make_key(key, version=version)
+        exists = self._client.exists(key)
+        if not exists:
+            raise ValueError("Key '%s' not found" % key)
+        try:
+            value = self._client.incr(key, delta)
+        except redis.ResponseError:
+            value = self.get(key) + 1
+            self.set(key, value)
+        return value
+
 class RedisCache(CacheClass):
     """
@@ -188,7 +226,7 @@ class RedisCache(CacheClass):
             version = self.version
         old_key = self.make_key(key, version)
         value = self.get(old_key, version=version)
-        ttl = self._cache.ttl(old_key)
+        ttl = self._client.ttl(old_key)
         if value is None:
             raise ValueError("Key '%s' not found" % key)
         new_key = self.make_key(key, version=version+delta)
@@ -5,7 +5,7 @@ setup(
     url = "http://github.com/sebleier/django-redis-cache/",
     author = "Sean Bleier",
     author_email = "sebleier@gmail.com",
-    version = "0.6.1",
+    version = "0.8.2",
     packages = ["redis_cache"],
     description = "Redis Cache Backend for Django",
     install_requires=['redis>=2.4.5',],
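For reference, wiring the packaged backend into a Django project looks roughly like the sketch below. The dotted backend path and LOCATION format are assumptions inferred from the class names in this diff and from the URI used in the tests ('redis_cache.cache://127.0.0.1'); the OPTIONS keys DB and PASSWORD are the ones the backend reads above.

    # settings.py (sketch, Django 1.3+ CACHES form; backend path assumed)
    CACHES = {
        'default': {
            'BACKEND': 'redis_cache.cache.CacheClass',
            'LOCATION': '127.0.0.1:6379',
            'OPTIONS': {
                'DB': 1,
                'PASSWORD': None,
            },
        },
    }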
@@ -34,7 +34,7 @@ class RedisCacheTests(TestCase):
     def reset_pool(self):
         if hasattr(self, 'cache'):
-            self.cache._cache.connection_pool.disconnect()
+            self.cache._client.connection_pool.disconnect()

     def get_cache(self, backend=None):
         if VERSION[0] == 1 and VERSION[1] < 3:
@@ -52,9 +52,9 @@ class RedisCacheTests(TestCase):
     def test_default_initialization(self):
         self.reset_pool()
         self.cache = self.get_cache('redis_cache.cache://127.0.0.1')
-        self.assertEqual(self.cache._cache.connection_pool.connection_kwargs['host'], '127.0.0.1')
-        self.assertEqual(self.cache._cache.connection_pool.connection_kwargs['db'], 1)
-        self.assertEqual(self.cache._cache.connection_pool.connection_kwargs['port'], 6379)
+        self.assertEqual(self.cache._client.connection_pool.connection_kwargs['host'], '127.0.0.1')
+        self.assertEqual(self.cache._client.connection_pool.connection_kwargs['db'], 1)
+        self.assertEqual(self.cache._client.connection_pool.connection_kwargs['port'], 6379)

     def test_simple(self):
         # Simple cache set/get works
@@ -83,6 +83,20 @@ class RedisCacheTests(TestCase):
         self.assertEqual(self.cache.get_many(['a', 'c', 'd']), {'a' : 'a', 'c' : 'c', 'd' : 'd'})
         self.assertEqual(self.cache.get_many(['a', 'b', 'e']), {'a' : 'a', 'b' : 'b'})

+    def test_get_many_with_manual_integer_insertion(self):
+        keys = ['a', 'b', 'c', 'd']
+        cache_keys = map(self.cache.make_key, keys)
+        # manually set integers and then get_many
+        for i, key in enumerate(cache_keys):
+            self.cache._client.set(key, i)
+        self.assertEqual(self.cache.get_many(keys), {'a': 0, 'b': 1, 'c': 2, 'd': 3})
+
+    def test_get_many_with_automatic_integer_insertion(self):
+        keys = ['a', 'b', 'c', 'd']
+        for i, key in enumerate(keys):
+            self.cache.set(key, i)
+        self.assertEqual(self.cache.get_many(keys), {'a': 0, 'b': 1, 'c': 2, 'd': 3})
+
     def test_delete(self):
         # Cache keys can be deleted
         self.cache.set("key1", "spam")
@@ -194,7 +208,7 @@ class RedisCacheTests(TestCase):
     def test_set_expiration_timeout_None(self):
         key, value = self.cache.make_key('key'), 'value'
         self.cache.set(key, value);
-        self.assertTrue(self.cache._cache.ttl(key) > 0)
+        self.assertTrue(self.cache._client.ttl(key) > 0)

     def test_unicode(self):
         # Unicode values can be cached
@@ -277,6 +291,33 @@ class RedisCacheTests(TestCase):
         self.assertEqual(self.cache.get(old_key), None)
         self.assertEqual(self.cache.get(new_key), 'spam')

+    def test_incr_with_pickled_integer(self):
+        "Testing case where there exists a pickled integer and we increment it"
+        number = 42
+        key = self.cache.make_key("key")
+        # manually set value using the redis client
+        self.cache._client.set(key, pickle.dumps(number))
+        new_value = self.cache.incr(key)
+        self.assertEqual(new_value, number + 1)
+
+        # Test that the pickled value was converted to an integer
+        value = int(self.cache._client.get(key))
+        self.assertTrue(isinstance(value, int))
+
+        # now that the value is an integer, let's increment it again.
+        new_value = self.cache.incr(key, 7)
+        self.assertEqual(new_value, number + 8)
+
+    def test_pickling_cache_object(self):
+        p = pickle.dumps(self.cache)
+        cache = pickle.loads(p)
+        # Now let's do a simple operation using the unpickled cache object
+        cache.add("addkey1", "value")
+        result = cache.add("addkey1", "newvalue")
+        self.assertEqual(result, False)
+        self.assertEqual(cache.get("addkey1"), "value")
+
 if __name__ == '__main__':
     unittest.main()