Commit 4fa93a81 authored by Sean Bleier

Changed the attribute _cache to _client on the cache class to be more semantically correct.

parent 54638d40
@@ -15,7 +15,6 @@ except ImportError:
         "Redis cache backend requires the 'redis-py' library")
-
 class CacheKey(object):
     """
     A stub string class that we can use to check if a key was created already.
@@ -55,8 +54,7 @@ class CacheClass(BaseCache):
         else:
             host = server or 'localhost'
             port = 6379
-        self._cache = redis.Redis(host=host, port=port, db=db, password=password)
-
+        self._client = redis.Redis(host=host, port=port, db=db, password=password)
 
     def make_key(self, key, version=None):
         """
@@ -74,7 +72,7 @@ class CacheClass(BaseCache):
         Returns ``True`` if the object was added, ``False`` if not.
         """
         key = self.make_key(key, version=version)
-        if self._cache.exists(key):
+        if self._client.exists(key):
             return False
         return self.set(key, value, timeout)
@@ -85,7 +83,7 @@ class CacheClass(BaseCache):
         Returns unpickled value if key is found, the default if not.
         """
         key = self.make_key(key, version=version)
-        value = self._cache.get(key)
+        value = self._client.get(key)
         if value is None:
             return default
         try:
@@ -100,16 +98,16 @@ class CacheClass(BaseCache):
         Persist a value to the cache, and set an optional expiration time.
         """
         if not client:
-            client = self._cache
+            client = self._client
         key = self.make_key(key, version=version)
         if not timeout:
             timeout = self.default_timeout
         try:
             value = int(value)
         except (ValueError, TypeError):
-            result = self._cache.setex(key, pickle.dumps(value), int(timeout))
+            result = self._client.setex(key, pickle.dumps(value), int(timeout))
         else:
-            result = self._cache.setex(key, value, int(timeout))
+            result = self._client.setex(key, value, int(timeout))
         # result is a boolean
         return result
@@ -117,7 +115,7 @@ class CacheClass(BaseCache):
         """
         Remove a key from the cache.
         """
-        self._cache.delete(self.make_key(key, version=version))
+        self._client.delete(self.make_key(key, version=version))
 
     def delete_many(self, keys, version=None):
         """
@@ -125,14 +123,14 @@ class CacheClass(BaseCache):
         """
         if keys:
             keys = map(lambda key: self.make_key(key, version=version), keys)
-            self._cache.delete(*keys)
+            self._client.delete(*keys)
 
     def clear(self):
         """
         Flush all cache keys.
         """
         # TODO : potential data loss here, should we only delete keys based on the correct version ?
-        self._cache.flushdb()
+        self._client.flushdb()
 
     def unpickle(self, value):
         """
@@ -150,7 +148,7 @@ class CacheClass(BaseCache):
         recovered_data = SortedDict()
         new_keys = map(lambda key: self.make_key(key, version=version), keys)
         map_keys = dict(zip(new_keys, keys))
-        results = self._cache.mget(new_keys)
+        results = self._client.mget(new_keys)
         for key, value in zip(new_keys, results):
             if value is None:
                 continue
@@ -168,7 +166,7 @@ class CacheClass(BaseCache):
         If timeout is given, that timeout will be used for the key; otherwise
         the default cache timeout will be used.
         """
-        pipeline = self._cache.pipeline()
+        pipeline = self._client.pipeline()
         for key, value in data.iteritems():
             self.set(key, value, timeout, version=version, client=pipeline)
         pipeline.execute()
@@ -179,11 +177,11 @@ class CacheClass(BaseCache):
         ValueError exception.
         """
         key = self.make_key(key, version=version)
-        exists = self._cache.exists(key)
+        exists = self._client.exists(key)
         if not exists:
             raise ValueError("Key '%s' not found" % key)
         try:
-            value = self._cache.incr(key, delta)
+            value = self._client.incr(key, delta)
         except redis.ResponseError:
             value = self.get(key) + 1
             self.set(key, value)
@@ -212,7 +210,7 @@ class RedisCache(CacheClass):
         version = self.version
         old_key = self.make_key(key, version)
         value = self.get(old_key, version=version)
-        ttl = self._cache.ttl(old_key)
+        ttl = self._client.ttl(old_key)
         if value is None:
             raise ValueError("Key '%s' not found" % key)
         new_key = self.make_key(key, version=version+delta)
@@ -34,7 +34,7 @@ class RedisCacheTests(TestCase):
     def reset_pool(self):
         if hasattr(self, 'cache'):
-            self.cache._cache.connection_pool.disconnect()
+            self.cache._client.connection_pool.disconnect()
 
     def get_cache(self, backend=None):
         if VERSION[0] == 1 and VERSION[1] < 3:
@@ -52,9 +52,9 @@ class RedisCacheTests(TestCase):
     def test_default_initialization(self):
         self.reset_pool()
         self.cache = self.get_cache('redis_cache.cache://127.0.0.1')
-        self.assertEqual(self.cache._cache.connection_pool.connection_kwargs['host'], '127.0.0.1')
-        self.assertEqual(self.cache._cache.connection_pool.connection_kwargs['db'], 1)
-        self.assertEqual(self.cache._cache.connection_pool.connection_kwargs['port'], 6379)
+        self.assertEqual(self.cache._client.connection_pool.connection_kwargs['host'], '127.0.0.1')
+        self.assertEqual(self.cache._client.connection_pool.connection_kwargs['db'], 1)
+        self.assertEqual(self.cache._client.connection_pool.connection_kwargs['port'], 6379)
 
     def test_simple(self):
         # Simple cache set/get works
@@ -194,7 +194,7 @@ class RedisCacheTests(TestCase):
     def test_set_expiration_timeout_None(self):
         key, value = self.cache.make_key('key'), 'value'
         self.cache.set(key, value);
-        self.assertTrue(self.cache._cache.ttl(key) > 0)
+        self.assertTrue(self.cache._client.ttl(key) > 0)
 
     def test_unicode(self):
         # Unicode values can be cached
@@ -283,12 +283,12 @@ class RedisCacheTests(TestCase):
         key = self.cache.make_key("key")
         # manually set value using the redis client
-        self.cache._cache.set(key, pickle.dumps(number))
+        self.cache._client.set(key, pickle.dumps(number))
         new_value = self.cache.incr(key)
         self.assertEqual(new_value, number + 1)
 
         # Test that the pickled value was converted to an integer
-        value = int(self.cache._cache.get(key))
+        value = int(self.cache._client.get(key))
         self.assertTrue(isinstance(value, int))
 
         # now that the value is an integer, let's increment it again.
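For readers using this backend from application code, here is a minimal, hypothetical usage sketch of what the rename amounts to in practice. It is not part of this commit: it assumes django-redis-cache is installed, Django's cache settings point at the redis_cache.cache backend, and a Redis server is reachable on localhost:6379; the variable names are illustrative only.

    # Hypothetical sketch; not part of this commit.
    # Assumes Django is configured to use the redis_cache backend and Redis is running locally.
    from django.core.cache import cache

    cache.set('greeting', 'hello', 60)            # goes through CacheClass.set()
    assert cache.get('greeting') == 'hello'

    # The raw redis-py client used to hang off cache._cache; after this commit
    # the same redis.Redis instance is exposed as cache._client.
    raw = cache._client
    print(raw.ttl(cache.make_key('greeting')))    # seconds until the key expires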