Commit b94eea68 authored by Sean Bleier's avatar Sean Bleier

Merging version 1.0 into master.

parent 9bf25f77
......@@ -13,4 +13,9 @@ development_settings.py
.pydevproject
.settings
dist/*
dump.rdb
dist/*
MANIFEST
.venv
redis/
*/_build/
language: python
python:
- "2.6"
- "2.7"
- "3.2"
- "3.3"
- "3.4"
env:
- DJANGO_VERSION=1.5
- DJANGO_VERSION=1.6
- DJANGO_VERSION=1.7
- DJANGO_VERSION=1.8
# command to run tests
script: make test DJANGO_VERSION=$DJANGO_VERSION
branches:
only:
- unstable
- master
SHELL := /bin/bash
PACKAGE_NAME=redis_cache
VENV_DIR?=.venv
VENV_ACTIVATE=$(VENV_DIR)/bin/activate
WITH_VENV=. $(VENV_ACTIVATE);
DJANGO_VERSION?=1.7
default:
python setup.py check build
$(VENV_ACTIVATE): requirements*.txt
test -f $@ || virtualenv --python=python2.7 --system-site-packages $(VENV_DIR)
touch $@
.PHONY: install_requirements
install_requirements: requirements*.txt
$(WITH_VENV) pip install --no-deps -r requirements.txt
$(WITH_VENV) pip install --no-deps -r requirements-dev.txt
$(WITH_VENV) pip install Django==$(DJANGO_VERSION)
.PHONY: venv
venv: $(VENV_ACTIVATE)
.PHONY: setup
setup: venv
.PHONY: redis_servers
redis_servers:
test -d redis || git clone https://github.com/antirez/redis
git -C redis checkout 2.6
make -C redis
for i in 1 2 3; do \
./redis/src/redis-server \
--pidfile /tmp/redis`echo $$i`.pid \
--requirepass yadayada \
--daemonize yes \
--port `echo 638$$i` ; \
done
for i in 4 5 6; do \
./redis/src/redis-server \
--pidfile /tmp/redis`echo $$i`.pid \
--requirepass yadayada \
--daemonize yes \
--port 0 \
--unixsocket /tmp/redis`echo $$i`.sock \
--unixsocketperm 755 ; \
done
.PHONY: clean
clean:
python setup.py clean
rm -rf build/
rm -rf dist/
rm -rf *.egg*/
rm -rf __pycache__/
rm -f MANIFEST
rm -f test.db
find $(PACKAGE_NAME) -type f -name '*.pyc' -delete
.PHONY: teardown
teardown:
rm -rf $(VENV_DIR)/
.PHONY: test
test: venv install_requirements redis_servers
$(WITH_VENV) PYTHONPATH=$(PYTHONPATH): django-admin.py test --settings=tests.settings -s
for i in 1 2 3 4 5 6; do kill `cat /tmp/redis$$i.pid`; done;
.PHONY: shell
shell: venv
$(WITH_VENV) PYTHONPATH=$(PYTHONPATH): django-admin.py shell --settings=tests.settings
......@@ -2,12 +2,33 @@
Redis Django Cache Backend
==========================
A cache backend for Django using the Redis data structure server.
.. image:: https://pypip.in/download/django-redis-cache/badge.svg
   :target: https://pypi.python.org/pypi/django-redis-cache/
:alt: Downloads
.. image:: https://pypip.in/version/django-redis-cache/badge.svg
:target: https://pypi.python.org/pypi/django-redis-cache/
:alt: Latest Version
A Redis cache backend for Django
Changelog
=========
1.0.0
-----
* Deprecate support for Django < 1.3 and Redis < 2.4. If you need support for
  those versions, pin django-redis-cache to a version below 1.0, e.g.
  ``pip install "django-redis-cache<1.0"``.
* Application level sharding when a list of locations is provided in the settings.
* Delete keys using wildcard syntax.
* Clear cache using version to delete only keys under that namespace.
* Ability to select pickle protocol version.
* Support for Master-Slave setup
* Thundering herd protection
0.13.0
------
......@@ -23,18 +44,13 @@ Changelog
* Adds `ttl` method to the cache. `cache.ttl(key)` will return the number of
  seconds before it expires, or None if the key is not volatile.
0.11.1
------
* Allows the user to specify connection pool class kwargs, e.g. ``timeout``,
  ``max_connections``, etc.
0.11.0
------
* Adds support for specifying the connection pool class.
* Adds ability to set the max connections for the connection pool.
0.10.0
------
......@@ -56,20 +72,14 @@ when parsing messages from the redis server. redis-py will pick the best
parser for you implicitly, but using the ``PARSER_CLASS`` setting gives you
control and the option to roll your own parser class if you are so bold.
Notes
-----
This cache backend requires the `redis-py`_ Python client library for
communicating with the Redis server.
Redis writes to disk asynchronously, so there is a slight chance
of losing some data, but for most purposes this is acceptable.
Requirements
============
In order to use the ``redis.connection.HiredisParser`` parser class, you need
to ``pip install`` `hiredis`_. This is the recommended parser class.
Usage
-----
* `redis-py`_ >= 2.4.5
* `redis`_ >= 2.4
* `hiredis`_
* `python`_ >= 2.5
1. Run ``pip install django-redis-cache``.
......@@ -81,7 +91,11 @@ Usage
CACHES = {
'default': {
'BACKEND': 'redis_cache.RedisCache',
'LOCATION': '<host>:<port>',
'LOCATION': [
'<host>:<port>',
'<host>:<port>',
'<host>:<port>',
],
'OPTIONS': {
'DB': 1,
'PASSWORD': 'yadayada',
......@@ -91,6 +105,8 @@ Usage
'max_connections': 50,
'timeout': 20,
}
'MAX_CONNECTIONS': 1000,
'PICKLE_VERSION': -1,
},
},
}
......@@ -105,12 +121,102 @@ Usage
'OPTIONS': {
'DB': 1,
'PASSWORD': 'yadayada',
'PARSER_CLASS': 'redis.connection.HiredisParser'
'PARSER_CLASS': 'redis.connection.HiredisParser',
'PICKLE_VERSION': 2,
},
},
}
.. _redis: http://redis.io
.. _redis-py: http://github.com/andymccurdy/redis-py/
.. _hiredis: https://github.com/pietern/hiredis-py
# For Master-Slave Setup, specify the host:port of the master
# redis-server instance.
CACHES = {
'default': {
'BACKEND': 'redis_cache.RedisCache',
'LOCATION': [
'<host>:<port>',
'<host>:<port>',
'<host>:<port>',
],
'OPTIONS': {
'DB': 1,
'PASSWORD': 'yadayada',
'PARSER_CLASS': 'redis.connection.HiredisParser',
'PICKLE_VERSION': 2,
'MASTER_CACHE': '<master host>:<master port>',
},
},
}
Usage
=====
django-redis-cache shares the same API as Django's built-in cache backends,
with a few exceptions.
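
For instance, a minimal round trip through the standard API looks like this
(a sketch, assuming the ``CACHES`` configuration shown above)::

    >>> from django.core.cache import cache
    >>> cache.set('greeting', 'hello', timeout=60)
    >>> cache.get('greeting')
    'hello'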
``cache.delete_pattern``
Delete keys using glob-style pattern.
    Example::
>>> from news.models import Story
>>>
>>> most_viewed = Story.objects.most_viewed()
>>> highest_rated = Story.objects.highest_rated()
>>> cache.set('news.stories.most_viewed', most_viewed)
>>> cache.set('news.stories.highest_rated', highest_rated)
>>> data = cache.get_many(['news.stories.highest_rated', 'news.stories.most_viewed'])
>>> len(data)
2
        >>> cache.delete_pattern('news.stories.*')
>>> data = cache.get_many(['news.stories.highest_rated', 'news.stories.most_viewed'])
>>> len(data)
0
``cache.clear``
    Same as Django's ``cache.clear``, except that you can optionally specify a
version and all keys with that version will be deleted. If no version is
provided, all keys are flushed from the cache.
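
    For example, to drop only the keys written under version 2 (a sketch,
    assuming the default key prefixing)::

        >>> cache.set('greeting', 'hello', version=2)
        >>> cache.clear(version=2)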
``cache.reinsert_keys``
This helper method retrieves all keys and inserts them back into the cache. This
is useful when changing the pickle protocol number of all the cache entries.
    Prior to django-redis-cache 1.0, all cache entries were pickled using
    protocol 0. To reduce the memory footprint of the redis server, simply run
    this method to upgrade cache entries to the latest protocol.
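
    A sketch of the upgrade path: set ``PICKLE_VERSION`` to the desired
    protocol in your settings, then rewrite the existing entries::

        >>> cache.reinsert_keys()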
Thundering Herd Protection
==========================
A common problem with caching is that you can sometimes get into a situation
where you have a value that takes a long time to compute or retrieve, but
clients request it frequently. For example, if you want to retrieve the latest
tweets from the Twitter API, you probably want to cache the response for a
number of minutes so you don't exceed your rate limit. However, when the cache
entry expires you can have multiple clients see that there is no entry and try
to fetch the latest results from the API simultaneously.
The way around this problem is to pass a callable and a timeout to
``get_or_set``, which will check the cache to see if the value needs to be
computed. If it does, then the cache sets a placeholder that tells future
clients to serve data from the stale cache until the new value is created.
Example::
tweets = cache.get_or_set('tweets', twitter.get_newest, timeout=300)
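
A slightly fuller sketch, with a hypothetical ``fetch_tweets`` helper standing
in for the real API call::

    def fetch_tweets():
        # Hypothetical helper: hit the Twitter API and return the newest tweets.
        return twitter.get_newest()

    # Computed at most once per five minutes; while the placeholder is live,
    # other clients are served the stale value instead of recomputing.
    tweets = cache.get_or_set('tweets', fetch_tweets, timeout=300)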
Running Tests
=============
``make test``
.. _redis-py: http://github.com/andymccurdy/redis-py/
.. _redis: http://github.com/antirez/redis/
.. _hiredis: http://github.com/antirez/hiredis/
.. _python: http://python.org
from redis_cache.cache import RedisCache
from redis_cache.backends.single import RedisCache
from redis_cache.backends.multiple import ShardedRedisCache
from django.core.cache.backends.base import BaseCache, InvalidCacheBackendError
from django.core.exceptions import ImproperlyConfigured
from django.utils import importlib
from django.utils.functional import cached_property
from django.utils.importlib import import_module
from redis_cache.compat import smart_bytes, DEFAULT_TIMEOUT
try:
import cPickle as pickle
except ImportError:
import pickle
try:
import redis
except ImportError:
raise InvalidCacheBackendError("Redis cache backend requires the 'redis-py' library")
from redis.connection import DefaultParser
from redis_cache.connection import pool
from redis_cache.utils import CacheKey
class BaseRedisCache(BaseCache):
def __init__(self, server, params):
"""
Connect to Redis, and set up cache backend.
"""
super(BaseRedisCache, self).__init__(params)
self.server = server
self.params = params or {}
self.options = params.get('OPTIONS', {})
def __getstate__(self):
return {'params': self.params, 'server': self.server}
def __setstate__(self, state):
self.__init__(**state)
def create_client(self, server):
kwargs = {
'db': self.db,
'password': self.password,
}
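        # ``server`` may be a redis:// URL, a "host:port" string, or a bare
        # unix socket path; each form is normalized into connection kwargs
        # for the shared connection pool set up below.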
if '://' in server:
client = redis.Redis.from_url(
server,
parser_class=self.parser_class,
**kwargs
)
kwargs.update(
client.connection_pool.connection_kwargs,
unix_socket_path=client.connection_pool.connection_kwargs.get('path'),
)
else:
unix_socket_path = None
if ':' in server:
host, port = server.rsplit(':', 1)
try:
port = int(port)
except (ValueError, TypeError):
raise ImproperlyConfigured("Port value must be an integer")
else:
host, port = None, None
unix_socket_path = server
kwargs.update(host=host, port=port, unix_socket_path=unix_socket_path)
client = redis.Redis(**kwargs)
kwargs.update(
parser_class=self.parser_class,
connection_pool_class=self.connection_pool_class,
connection_pool_class_kwargs=self.connection_pool_class_kwargs,
)
connection_pool = pool.get_connection_pool(**kwargs)
client.connection_pool = connection_pool
return client
@cached_property
def db(self):
_db = self.params.get('db', self.options.get('DB', 1))
try:
return int(_db)
except (ValueError, TypeError):
raise ImproperlyConfigured("db value must be an integer")
@cached_property
def password(self):
return self.params.get('password', self.options.get('PASSWORD', None))
@cached_property
def parser_class(self):
cls = self.options.get('PARSER_CLASS', None)
if cls is None:
return DefaultParser
mod_path, cls_name = cls.rsplit('.', 1)
try:
mod = importlib.import_module(mod_path)
parser_class = getattr(mod, cls_name)
        except AttributeError:
            raise ImproperlyConfigured("Could not find parser class '%s'" % cls)
        except ImportError as e:
            raise ImproperlyConfigured("Could not find module '%s'" % e)
return parser_class
@cached_property
def pickle_version(self):
"""
Get the pickle version from the settings and save it for future use
"""
_pickle_version = self.options.get('PICKLE_VERSION', -1)
try:
return int(_pickle_version)
except (ValueError, TypeError):
raise ImproperlyConfigured("pickle version value must be an integer")
@cached_property
def connection_pool_class(self):
pool_class = self.options.get('CONNECTION_POOL_CLASS', 'redis.ConnectionPool')
module_name, class_name = pool_class.rsplit('.', 1)
module = import_module(module_name)
try:
return getattr(module, class_name)
except AttributeError:
raise ImportError('cannot import name %s' % class_name)
@cached_property
def connection_pool_class_kwargs(self):
return self.options.get('CONNECTION_POOL_CLASS_KWARGS', {})
    @cached_property
    def master_client(self):
        """
        Get the client for the master cache, used for writes, when a
        ``MASTER_CACHE`` option is configured.
        """
        cache = self.options.get('MASTER_CACHE', None)
        if cache is None:
            return None
        try:
            host, port = cache.split(":")
        except ValueError:
            raise ImproperlyConfigured("MASTER_CACHE must be in the form <host>:<port>")
        for client in self.clients.itervalues():
            connection_kwargs = client.connection_pool.connection_kwargs
            if connection_kwargs['host'] == host and connection_kwargs['port'] == int(port):
                return client
        raise ImproperlyConfigured("%s is not in the list of available redis-server instances." % cache)
def serialize(self, value):
return pickle.dumps(value, self.pickle_version)
def deserialize(self, value):
"""
Unpickles the given value.
"""
value = smart_bytes(value)
return pickle.loads(value)
def get_value(self, original):
try:
value = int(original)
except (ValueError, TypeError):
value = self.deserialize(original)
return value
def prep_value(self, value):
if isinstance(value, int) and not isinstance(value, bool):
return value
return self.serialize(value)
def make_key(self, key, version=None):
if not isinstance(key, CacheKey):
versioned_key = super(BaseRedisCache, self).make_key(key, version)
return CacheKey(key, versioned_key)
return key
def make_keys(self, keys, version=None):
return [self.make_key(key, version=version) for key in keys]
####################
# Django cache api #
####################
def _add(self, client, key, value, timeout):
return self._set(key, value, timeout, client, _add_only=True)
def add(self, key, value, timeout=None, version=None):
"""
Add a value to the cache, failing if the key already exists.
Returns ``True`` if the object was added, ``False`` if not.
"""
raise NotImplementedError
def _get(self, client, key, default=None):
value = client.get(key)
if value is None:
return default
value = self.get_value(value)
return value
def get(self, key, default=None, version=None):
"""
Retrieve a value from the cache.
Returns unpickled value if key is found, the default if not.
"""
raise NotImplementedError
def __set(self, client, key, value, timeout, _add_only=False):
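        # Timeout semantics: ``None`` or ``0`` stores the key without an
        # expiration, a positive timeout sets an expiry, and a negative
        # timeout is treated as a no-op that reports failure.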
if timeout is None or timeout == 0:
if _add_only:
return client.setnx(key, value)
return client.set(key, value)
elif timeout > 0:
if _add_only:
added = client.setnx(key, value)
if added:
client.expire(key, timeout)
return added
return client.setex(key, value, timeout)
else:
return False
def _set(self, key, value, timeout=DEFAULT_TIMEOUT, client=None, _add_only=False):
"""
Persist a value to the cache, and set an optional expiration time.
"""
if timeout is DEFAULT_TIMEOUT:
timeout = self.default_timeout
if timeout is not None:
timeout = int(timeout)
        # Pickle anything that is not a plain int (bools are pickled as well)
if not isinstance(value, int) or isinstance(value, bool):
result = self.__set(client, key, pickle.dumps(value), timeout, _add_only)
else:
result = self.__set(client, key, value, timeout, _add_only)
# result is a boolean
return result
def set(self, key, value, timeout=None, version=None, client=None):
"""
Persist a value to the cache, and set an optional expiration time.
"""
raise NotImplementedError()
def _delete(self, client, key):
return client.delete(key)
def delete(self, key, version=None):
"""
Remove a key from the cache.
"""
raise NotImplementedError
def _delete_many(self, client, keys):
return client.delete(*keys)
def delete_many(self, keys, version=None):
"""
Remove multiple keys at once.
"""
raise NotImplementedError
def _clear(self, client):
return client.flushdb()
def clear(self, version=None):
"""
Flush cache keys.
        If version is specified, all keys belonging to the version's key
namespace will be deleted. Otherwise, all keys will be deleted.
"""
raise NotImplementedError
def _get_many(self, client, original_keys, versioned_keys):
"""
Retrieve many keys.
"""
recovered_data = {}
map_keys = dict(zip(versioned_keys, original_keys))
results = client.mget(versioned_keys)
for key, value in zip(versioned_keys, results):
if value is None:
continue
recovered_data[map_keys[key]] = self.get_value(value)
return recovered_data
def get_many(self, keys, version=None):
raise NotImplementedError
def _set_many(self, client, data):
new_data = {}
for key, value in data.items():
new_data[key] = self.prep_value(value)
return client.mset(new_data)
def set_many(self, data, timeout=None, version=None):
"""
Set a bunch of values in the cache at once from a dict of key/value
pairs. This is much more efficient than calling set() multiple times.
If timeout is given, that timeout will be used for the key; otherwise
the default cache timeout will be used.
"""
raise NotImplementedError
def _incr(self, client, key, delta=1):
exists = client.exists(key)
if not exists:
raise ValueError("Key '%s' not found" % key)
try:
value = client.incr(key, delta)
except redis.ResponseError:
value = self._get(client, key) + delta
            self._set(key, value, timeout=None, client=client)
return value
def incr(self, key, delta=1, version=None):
"""
Add delta to value in the cache. If the key does not exist, raise a
ValueError exception.
"""
raise NotImplementedError
def _incr_version(self, client, old, new, delta, version):
try:
client.rename(old, new)
except redis.ResponseError:
raise ValueError("Key '%s' not found" % old._original_key)
return version + delta
def incr_version(self, key, delta=1, version=None):
"""
Adds delta to the cache version for the supplied key. Returns the
new version.
"""
raise NotImplementedError
#####################
# Extra api methods #
#####################
def _has_key(self, client, key, version=None):
"""Returns True if the key is in the cache and has not expired."""
key = self.make_key(key, version=version)
return client.exists(key)
def has_key(self, key, version=None):
raise NotImplementedError
def _ttl(self, client, key):
"""
        Returns the 'time-to-live' of a key. If the key is not volatile, i.e.
        it has no expiration set, then the value returned is None. Otherwise,
        the value is the number of seconds remaining. If the key does not
        exist, 0 is returned.
"""
if client.exists(key):
return client.ttl(key)
return 0
def ttl(self, key, version=None):
raise NotImplementedError
def _delete_pattern(self, client, pattern):
keys = client.keys(pattern)
if len(keys):
client.delete(*keys)
def delete_pattern(self, pattern, version=None):
raise NotImplementedError
def _get_or_set(self, client, key, func, timeout=None):
if not callable(func):
            raise TypeError("func must be a callable")
dogpile_lock_key = "_lock" + key._versioned_key
dogpile_lock = client.get(dogpile_lock_key)
if dogpile_lock is None:
self._set(dogpile_lock_key, 0, None, client)
value = func()
self.__set(client, key, self.prep_value(value), None)
self.__set(client, dogpile_lock_key, 0, timeout)
else:
value = self._get(client, key)
return value
def get_or_set(self, key, func, timeout=None, version=None):
raise NotImplementedError
def _reinsert_keys(self, client):
keys = client.keys('*')
for key in keys:
timeout = client.ttl(key)
value = self.deserialize(client.get(key))
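            # Only keys without an expiry are rewritten; volatile keys will
            # expire and be re-created under the new pickle protocol anyway.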
if timeout is None:
client.set(key, self.prep_value(value))
def reinsert_keys(self):
"""
Reinsert cache entries using the current pickle protocol version.
"""
raise NotImplementedError
from collections import defaultdict
from django.core.exceptions import ImproperlyConfigured
from redis_cache.backends.base import BaseRedisCache
from redis_cache.compat import DEFAULT_TIMEOUT
from redis_cache.sharder import HashRing
class ShardedRedisCache(BaseRedisCache):
def __init__(self, server, params):
super(ShardedRedisCache, self).__init__(server, params)
self._params = params
self._server = server
self._pickle_version = None
self.__master_client = None
self.clients = {}
self.sharder = HashRing()
if not isinstance(server, (list, tuple)):
servers = [server]
else:
servers = server
for server in servers:
client = self.create_client(server)
self.clients[client.connection_pool.connection_identifier] = client
self.sharder.add(client.connection_pool.connection_identifier)
@property
def master_client(self):
"""
Get the write server:port of the master cache
"""
if not hasattr(self, '_master_client') and self.__master_client is None:
cache = self.options.get('MASTER_CACHE', None)
if cache is None:
self._master_client = None
else:
self._master_client = None
try:
host, port = cache.split(":")
except ValueError:
raise ImproperlyConfigured("MASTER_CACHE must be in the form <host>:<port>")
                for client in self.clients.itervalues():
connection_kwargs = client.connection_pool.connection_kwargs
if connection_kwargs['host'] == host and connection_kwargs['port'] == int(port):
self._master_client = client
break
if self._master_client is None:
raise ImproperlyConfigured("%s is not in the list of available redis-server instances." % cache)
return self._master_client
def get_client(self, key, for_write=False):
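        # Writes go to the master when one is configured; otherwise the key
        # is hashed onto the ring to pick a shard.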
if for_write and self.master_client is not None:
return self.master_client
node = self.sharder.get_node(unicode(key))
return self.clients[node]
def shard(self, keys, for_write=False, version=None):
"""
Returns a dict of keys that belong to a cache's keyspace.
"""
clients = defaultdict(list)
for key in keys:
clients[self.get_client(key, for_write)].append(self.make_key(key, version))
return clients
####################
# Django cache api #
####################
def add(self, key, value, timeout=None, version=None):
"""
Add a value to the cache, failing if the key already exists.
Returns ``True`` if the object was added, ``False`` if not.
"""
client = self.get_client(key)
key = self.make_key(key, version=version)
return self._add(client, key, value, timeout)
def get(self, key, default=None, version=None):
"""
Retrieve a value from the cache.
Returns unpickled value if key is found, the default if not.
"""
client = self.get_client(key)
key = self.make_key(key, version=version)
return self._get(client, key, default)
def set(self, key, value, timeout=None, version=None, client=None):
"""
Persist a value to the cache, and set an optional expiration time.
"""
if client is None:
client = self.get_client(key, for_write=True)
key = self.make_key(key, version=version)
return self._set(key, value, timeout, client=client)
def delete(self, key, version=None):
"""
Remove a key from the cache.
"""
client = self.get_client(key, for_write=True)
key = self.make_key(key, version=version)
return self._delete(client, key)
def delete_many(self, keys, version=None):
"""
Remove multiple keys at once.
"""
clients = self.shard(keys, for_write=True, version=version)
for client, keys in clients.items():
self._delete_many(client, keys)
def clear(self, version=None):
"""
Flush cache keys.
        If version is specified, all keys belonging to the version's key
namespace will be deleted. Otherwise, all keys will be deleted.
"""
if version is None:
if self.master_client is None:
for client in self.clients.itervalues():
self._clear(client)
else:
self._clear(self.master_client)
else:
self.delete_pattern('*', version=version)
def get_many(self, keys, version=None):
data = {}
clients = self.shard(keys, version=version)
for client, versioned_keys in clients.items():
original_keys = [key._original_key for key in versioned_keys]
data.update(self._get_many(client, original_keys, versioned_keys=versioned_keys))
return data
def set_many(self, data, timeout=None, version=None):
"""
Set a bunch of values in the cache at once from a dict of key/value
pairs. This is much more efficient than calling set() multiple times.
If timeout is given, that timeout will be used for the key; otherwise
the default cache timeout will be used.
"""
clients = self.shard(data.keys(), for_write=True, version=version)
if timeout is None:
for client, keys in clients.items():
subset = {}
for key in keys:
subset[key] = data[key._original_key]
self._set_many(client, subset)
return
for client, keys in clients.items():
pipeline = client.pipeline()
for key in keys:
self._set(key, data[key._original_key], timeout, client=pipeline)
pipeline.execute()
def incr(self, key, delta=1, version=None):
"""
Add delta to value in the cache. If the key does not exist, raise a
ValueError exception.
"""
client = self.get_client(key, for_write=True)
key = self.make_key(key, version=version)
return self._incr(client, key, delta=delta)
def incr_version(self, key, delta=1, version=None):
"""
Adds delta to the cache version for the supplied key. Returns the
new version.
"""
if version is None:
version = self.version
client = self.get_client(key, for_write=True)
old = self.make_key(key, version=version)
new = self.make_key(key, version=version + delta)
return self._incr_version(client, old, new, delta, version)
#####################
# Extra api methods #
#####################
def has_key(self, key, version=None):
client = self.get_client(key, for_write=False)
return self._has_key(client, key, version)
def ttl(self, key, version=None):
client = self.get_client(key, for_write=False)
key = self.make_key(key, version=version)
return self._ttl(client, key)
def delete_pattern(self, pattern, version=None):
pattern = self.make_key(pattern, version=version)
if self.master_client is None:
for client in self.clients.itervalues():
self._delete_pattern(client, pattern)
else:
self._delete_pattern(self.master_client, pattern)
def get_or_set(self, key, func, timeout=None, version=None):
client = self.get_client(key, for_write=True)
key = self.make_key(key, version=version)
return self._get_or_set(client, key, func, timeout)
def reinsert_keys(self):
"""
Reinsert cache entries using the current pickle protocol version.
"""
for client in self.clients.itervalues():
self._reinsert_keys(client)
try:
import cPickle as pickle
except ImportError:
import pickle
from redis_cache.backends.base import BaseRedisCache
from redis_cache.compat import bytes_type, DEFAULT_TIMEOUT
class RedisCache(BaseRedisCache):
def __init__(self, server, params):
"""
Connect to Redis, and set up cache backend.
"""
super(RedisCache, self).__init__(server, params)
if not isinstance(server, bytes_type):
self._server, = server
self.client = self.create_client(server)
self.clients = {
self.client.connection_pool.connection_identifier: self.client
}
def get_client(self, *args):
return self.client
####################
# Django cache api #
####################
def add(self, key, value, timeout=None, version=None):
"""
Add a value to the cache, failing if the key already exists.
Returns ``True`` if the object was added, ``False`` if not.
"""
key = self.make_key(key, version=version)
return self._add(self.client, key, value, timeout)
def get(self, key, default=None, version=None):
"""
Retrieve a value from the cache.
Returns unpickled value if key is found, the default if not.
"""
key = self.make_key(key, version=version)
return self._get(self.client, key, default)
def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None, client=None):
"""
Persist a value to the cache, and set an optional expiration time.
"""
key = self.make_key(key, version=version)
return self._set(key, value, timeout, client=self.client)
def delete(self, key, version=None):
"""
Remove a key from the cache.
"""
key = self.make_key(key, version=version)
return self._delete(self.client, key)
def delete_many(self, keys, version=None):
"""
Remove multiple keys at once.
"""
versioned_keys = self.make_keys(keys, version=version)
self._delete_many(self.client, versioned_keys)
def clear(self, version=None):
"""
Flush cache keys.
        If version is specified, all keys belonging to the version's key
namespace will be deleted. Otherwise, all keys will be deleted.
"""
if version is None:
self._clear(self.client)
else:
self.delete_pattern('*', version=version)
def get_many(self, keys, version=None):
versioned_keys = self.make_keys(keys, version=version)
return self._get_many(self.client, keys, versioned_keys=versioned_keys)
def set_many(self, data, timeout=None, version=None):
"""
Set a bunch of values in the cache at once from a dict of key/value
pairs. This is much more efficient than calling set() multiple times.
If timeout is given, that timeout will be used for the key; otherwise
the default cache timeout will be used.
"""
        versioned_keys = self.make_keys(data.keys(), version=version)
if timeout is None:
new_data = {}
for key in versioned_keys:
new_data[key] = data[key._original_key]
return self._set_many(self.client, new_data)
pipeline = self.client.pipeline()
for key in versioned_keys:
self._set(key, data[key._original_key], timeout, client=pipeline)
pipeline.execute()
def incr(self, key, delta=1, version=None):
"""
Add delta to value in the cache. If the key does not exist, raise a
ValueError exception.
"""
key = self.make_key(key, version=version)
return self._incr(self.client, key, delta=delta)
def incr_version(self, key, delta=1, version=None):
"""
Adds delta to the cache version for the supplied key. Returns the
new version.
"""
if version is None:
version = self.version
old = self.make_key(key, version)
new = self.make_key(key, version=version + delta)
return self._incr_version(self.client, old, new, delta, version)
#####################
# Extra api methods #
#####################
def has_key(self, key, version=None):
return self._has_key(self.client, key, version)
def ttl(self, key, version=None):
key = self.make_key(key, version=version)
return self._ttl(self.client, key)
def delete_pattern(self, pattern, version=None):
pattern = self.make_key(pattern, version=version)
self._delete_pattern(self.client, pattern)
def get_or_set(self, key, func, timeout=None, version=None):
key = self.make_key(key, version=version)
return self._get_or_set(self.client, key, func, timeout)
def reinsert_keys(self):
"""
Reinsert cache entries using the current pickle protocol version.
"""
self._reinsert_keys(self.client)
from django.core.cache.backends.base import BaseCache, InvalidCacheBackendError
from django.core.exceptions import ImproperlyConfigured
from django.utils.datastructures import SortedDict
from .compat import (smart_text, smart_bytes, bytes_type,
python_2_unicode_compatible, DEFAULT_TIMEOUT)
try:
import importlib
except ImportError:
from django.utils import importlib
try:
import cPickle as pickle
except ImportError:
import pickle
try:
import redis
except ImportError:
raise InvalidCacheBackendError(
"Redis cache backend requires the 'redis-py' library")
from redis.connection import UnixDomainSocketConnection, Connection
from redis.connection import DefaultParser
@python_2_unicode_compatible
class CacheKey(object):
"""
A stub string class that we can use to check if a key was created already.
"""
def __init__(self, key):
self._key = key
def __eq__(self, other):
return self._key == other
def __str__(self):
return smart_text(self._key)
def __repr__(self):
return repr(self._key)
def __hash__(self):
return hash(self._key)
class CacheConnectionPool(object):
def __init__(self):
self._connection_pools = {}
def get_connection_pool(self, host='127.0.0.1', port=6379, db=1,
password=None, parser_class=None,
unix_socket_path=None, connection_pool_class=None,
connection_pool_class_kwargs=None):
connection_identifier = (host, port, db, unix_socket_path)
pool = self._connection_pools.get(connection_identifier)
if pool is None:
connection_class = (
unix_socket_path and UnixDomainSocketConnection or Connection
)
kwargs = {
'db': db,
'password': password,
'connection_class': connection_class,
'parser_class': parser_class,
}
kwargs.update(connection_pool_class_kwargs)
if unix_socket_path is None:
kwargs.update({
'host': host,
'port': port,
})
else:
kwargs['path'] = unix_socket_path
self._connection_pools[connection_identifier] = connection_pool_class(**kwargs)
return self._connection_pools[connection_identifier]
pool = CacheConnectionPool()
class CacheClass(BaseCache):
def __init__(self, server, params):
"""
Connect to Redis, and set up cache backend.
"""
self._init(server, params)
def _init(self, server, params):
super(CacheClass, self).__init__(params)
self._server = server
self._params = params
unix_socket_path = None
if ':' in self.server:
host, port = self.server.rsplit(':', 1)
try:
port = int(port)
except (ValueError, TypeError):
raise ImproperlyConfigured("port value must be an integer")
else:
host, port = None, None
unix_socket_path = self.server
kwargs = {
'db': self.db,
'password': self.password,
'host': host,
'port': port,
'unix_socket_path': unix_socket_path,
}
connection_pool = pool.get_connection_pool(
parser_class=self.parser_class,
connection_pool_class=self.connection_pool_class,
connection_pool_class_kwargs=self.connection_pool_class_kwargs,
**kwargs
)
self._client = redis.Redis(
connection_pool=connection_pool,
**kwargs
)
@property
def server(self):
return self._server or "127.0.0.1:6379"
@property
def params(self):
return self._params or {}
@property
def options(self):
return self.params.get('OPTIONS', {})
@property
def connection_pool_class(self):
cls = self.options.get('CONNECTION_POOL_CLASS', 'redis.ConnectionPool')
mod_path, cls_name = cls.rsplit('.', 1)
try:
mod = importlib.import_module(mod_path)
pool_class = getattr(mod, cls_name)
except (AttributeError, ImportError):
raise ImproperlyConfigured("Could not find connection pool class '%s'" % cls)
return pool_class
@property
def connection_pool_class_kwargs(self):
return self.options.get('CONNECTION_POOL_CLASS_KWARGS', {})
@property
def db(self):
_db = self.params.get('db', self.options.get('DB', 1))
try:
_db = int(_db)
except (ValueError, TypeError):
raise ImproperlyConfigured("db value must be an integer")
return _db
@property
def password(self):
return self.params.get('password', self.options.get('PASSWORD', None))
@property
def parser_class(self):
cls = self.options.get('PARSER_CLASS', None)
if cls is None:
return DefaultParser
mod_path, cls_name = cls.rsplit('.', 1)
try:
mod = importlib.import_module(mod_path)
parser_class = getattr(mod, cls_name)
except (AttributeError, ImportError):
raise ImproperlyConfigured("Could not find parser class '%s'" % parser_class)
return parser_class
def __getstate__(self):
return {'params': self._params, 'server': self._server}
def __setstate__(self, state):
self._init(**state)
def make_key(self, key, version=None):
"""
        Wraps the given key in a ``CacheKey`` instance so we can check
        whether it has already been "made".
"""
if not isinstance(key, CacheKey):
key = CacheKey(key)
return key
def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
"""
Add a value to the cache, failing if the key already exists.
Returns ``True`` if the object was added, ``False`` if not.
"""
return self.set(key, value, timeout, _add_only=True)
def get(self, key, default=None, version=None):
"""
Retrieve a value from the cache.
Returns unpickled value if key is found, the default if not.
"""
key = self.make_key(key, version=version)
value = self._client.get(key)
if value is None:
return default
try:
result = int(value)
except (ValueError, TypeError):
result = self.unpickle(value)
return result
def _set(self, key, value, timeout, client, _add_only=False):
if timeout is None or timeout == 0:
if _add_only:
return client.setnx(key, value)
return client.set(key, value)
elif timeout > 0:
if _add_only:
added = client.setnx(key, value)
if added:
client.expire(key, timeout)
return added
return client.setex(key, value, timeout)
else:
return False
def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None, client=None, _add_only=False):
"""
Persist a value to the cache, and set an optional expiration time.
"""
if not client:
client = self._client
key = self.make_key(key, version=version)
if timeout is DEFAULT_TIMEOUT:
timeout = self.default_timeout
if timeout is not None:
timeout = int(timeout)
        # Pickle anything that is not a plain int (bools are pickled as well)
if not isinstance(value, int) or isinstance(value, bool):
result = self._set(key, pickle.dumps(value), timeout, client, _add_only)
else:
result = self._set(key, value, timeout, client, _add_only)
# result is a boolean
return result
def delete(self, key, version=None):
"""
Remove a key from the cache.
"""
self._client.delete(self.make_key(key, version=version))
def delete_many(self, keys, version=None):
"""
Remove multiple keys at once.
"""
if keys:
keys = map(lambda key: self.make_key(key, version=version), keys)
self._client.delete(*keys)
def clear(self):
"""
Flush all cache keys.
"""
        # TODO: potential data loss here; should we only delete keys based on the correct version?
self._client.flushdb()
def unpickle(self, value):
"""
Unpickles the given value.
"""
value = smart_bytes(value)
return pickle.loads(value)
def get_many(self, keys, version=None):
"""
Retrieve many keys.
"""
if not keys:
return {}
recovered_data = SortedDict()
new_keys = list(map(lambda key: self.make_key(key, version=version), keys))
map_keys = dict(zip(new_keys, keys))
results = self._client.mget(new_keys)
for key, value in zip(new_keys, results):
if value is None:
continue
try:
value = int(value)
except (ValueError, TypeError):
value = self.unpickle(value)
if isinstance(value, bytes_type):
value = smart_text(value)
recovered_data[map_keys[key]] = value
return recovered_data
def set_many(self, data, timeout=DEFAULT_TIMEOUT, version=None):
"""
Set a bunch of values in the cache at once from a dict of key/value
pairs. This is much more efficient than calling set() multiple times.
If timeout is given, that timeout will be used for the key; otherwise
the default cache timeout will be used.
"""
pipeline = self._client.pipeline()
for key, value in data.items():
self.set(key, value, timeout, version=version, client=pipeline)
pipeline.execute()
def incr(self, key, delta=1, version=None):
"""
Add delta to value in the cache. If the key does not exist, raise a
ValueError exception.
"""
key = self.make_key(key, version=version)
exists = self._client.exists(key)
if not exists:
raise ValueError("Key '%s' not found" % key)
try:
value = self._client.incr(key, delta)
except redis.ResponseError:
value = self.get(key) + delta
self.set(key, value)
return value
def ttl(self, key, version=None):
"""
        Returns the 'time-to-live' of a key. If the key is not volatile, i.e.
        it has no expiration set, then the value returned is None. Otherwise,
        the value is the number of seconds remaining. If the key does not exist,
        0 is returned.
"""
key = self.make_key(key, version=version)
if self._client.exists(key):
return self._client.ttl(key)
return 0
def has_key(self, key, version=None):
"""
Returns True if the key is in the cache and has not expired.
"""
key = self.make_key(key, version=version)
return self._client.exists(key)
class RedisCache(CacheClass):
"""
A subclass that is supposed to be used on Django >= 1.3.
"""
def make_key(self, key, version=None):
if not isinstance(key, CacheKey):
key = CacheKey(super(CacheClass, self).make_key(key, version))
return key
def incr_version(self, key, delta=1, version=None):
"""
Adds delta to the cache version for the supplied key. Returns the
new version.
        Note: In Redis 2.0 you cannot rename a volatile key, so we have to move
the value from the old key to the new key and maintain the ttl.
"""
if version is None:
version = self.version
old_key = self.make_key(key, version)
value = self.get(old_key, version=version)
ttl = self._client.ttl(old_key)
if value is None:
raise ValueError("Key '%s' not found" % key)
new_key = self.make_key(key, version=version + delta)
# TODO: See if we can check the version of Redis, since 2.2 will be able
        # to rename volatile keys.
self.set(new_key, value, timeout=ttl)
self.delete(old_key)
return version + delta
# for backwards compat
from redis_cache import RedisCache
from redis_cache import ShardedRedisCache
from redis_cache.backends.base import ImproperlyConfigured
from redis_cache.connection import pool
from redis.connection import UnixDomainSocketConnection, Connection
class CacheConnectionPool(object):
def __init__(self):
self._connection_pools = {}
def get_connection_pool(
self,
host='127.0.0.1',
port=6379,
db=1,
password=None,
parser_class=None,
unix_socket_path=None,
connection_pool_class=None,
connection_pool_class_kwargs=None,
**kwargs
):
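        # Pools are memoized per (host, port, db, unix_socket_path) so that
        # every client pointing at the same server shares one connection pool.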
connection_identifier = (
host, port, db, unix_socket_path
)
pool = self._connection_pools.get(connection_identifier)
if pool is None:
connection_class = (
unix_socket_path and UnixDomainSocketConnection or Connection
)
kwargs = {
'db': db,
'password': password,
'connection_class': connection_class,
'parser_class': parser_class,
}
kwargs.update(connection_pool_class_kwargs)
if unix_socket_path is None:
kwargs.update({
'host': host,
'port': port,
})
else:
kwargs['path'] = unix_socket_path
self._connection_pools[connection_identifier] = connection_pool_class(**kwargs)
self._connection_pools[connection_identifier].connection_identifier = (
connection_identifier
)
return self._connection_pools[connection_identifier]
pool = CacheConnectionPool()
from bisect import insort, bisect
from hashlib import md5
from math import log
import sys
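# Number of hex digits of an md5 digest that fit safely in a native integer;
# used to derive positions on the hash ring.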
DIGITS = int(log(sys.maxint) / log(16))
def make_hash(s):
return int(md5(s.encode('utf-8')).hexdigest()[:DIGITS], 16)
class Node(object):
def __init__(self, node, i):
self._node = node
self._position = make_hash("%d:%s" % (i, str(self._node)))
def __cmp__(self, other):
if isinstance(other, int):
return cmp(self._position, other)
elif isinstance(other, Node):
return cmp(self._position, other._position)
raise TypeError('Cannot compare this class with "%s" type' % type(other))
def __eq__(self, other):
return self._node == other._node
class HashRing(object):
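    """
    A minimal consistent-hash ring. Each node is inserted ``replicas`` times
    (scaled by its weight) at pseudo-random positions; ``get_node`` walks the
    ring to find the node responsible for a given key.
    """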
def __init__(self, replicas=16):
self.replicas = replicas
self._nodes = []
def _add(self, node, i):
insort(self._nodes, Node(node, i))
def add(self, node, weight=1):
for i in xrange(weight * self.replicas):
self._add(node, i)
def remove(self, node):
n = len(self._nodes)
for i, _node in enumerate(reversed(self._nodes)):
if node == _node._node:
del self._nodes[n - i - 1]
def get_node(self, key):
i = bisect(self._nodes, make_hash(key)) - 1
return self._nodes[i]._node
from redis_cache.compat import smart_text, python_2_unicode_compatible
@python_2_unicode_compatible
class CacheKey(object):
"""
A stub string class that we can use to check if a key was created already.
"""
def __init__(self, key, versioned_key):
self._original_key = key
self._versioned_key = versioned_key
def __eq__(self, other):
return self._versioned_key == other
def __unicode__(self):
return smart_text(self._versioned_key)
__repr__ = __str__ = __unicode__
hiredis==0.2.0
django-nose==1.4
nose==1.3.6
from setuptools import setup
setup(
name = "django-redis-cache",
url = "http://github.com/sebleier/django-redis-cache/",
author = "Sean Bleier",
author_email = "sebleier@gmail.com",
version = "0.13.1",
packages = ["redis_cache"],
description = "Redis Cache Backend for Django",
install_requires=['redis>=2.4.5',],
classifiers = [
name="django-redis-cache",
url="http://github.com/sebleier/django-redis-cache/",
author="Sean Bleier",
author_email="sebleier@gmail.com",
version="1.0.0a",
packages=["redis_cache"],
description="Redis Cache Backend for Django",
install_requires=['redis>=2.4.5'],
classifiers=[
"Programming Language :: Python",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Operating System :: OS Independent",
"Topic :: Software Development :: Libraries",
"Topic :: Utilities",
......
#!/usr/bin/env python
import sys
from os.path import dirname, abspath
from django.conf import settings
cache_settings = {
'DATABASES': {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
'INSTALLED_APPS': [
'tests.testapp',
],
'ROOT_URLCONF': 'tests.urls',
'CACHES': {
'default': {
'BACKEND': 'redis_cache.RedisCache',
'LOCATION': '/tmp/redis.sock',
'OPTIONS': {
'DB': 15,
'PASSWORD': 'yadayada',
'PARSER_CLASS': 'redis.connection.HiredisParser'
},
},
},
}
if not settings.configured:
settings.configure(**cache_settings)
from django.test.simple import DjangoTestSuiteRunner
def runtests(*test_args):
if not test_args:
test_args = ['testapp']
parent = dirname(abspath(__file__))
sys.path.insert(0, parent)
runner = DjangoTestSuiteRunner(verbosity=1, interactive=True, failfast=False)
failures = runner.run_tests(test_args)
sys.exit(failures)
if __name__ == '__main__':
runtests(*sys.argv[1:])
#!/usr/bin/env python
import sys
from os.path import dirname, abspath, join
import django
from django.conf import settings
cache_settings = {
'DATABASES': {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
'MIDDLEWARE_CLASSES':(),
'INSTALLED_APPS': [
'testapp',
],
'ROOT_URLCONF': 'tests.urls',
'CACHES': {
'default': {
'BACKEND': 'redis_cache.RedisCache',
'LOCATION': '127.0.0.1:6379',
'OPTIONS': {
'DB': 15,
'PARSER_CLASS': 'redis.connection.HiredisParser',
'CONNECTION_POOL_CLASS': 'redis.ConnectionPool',
'CONNECTION_POOL_CLASS_KWARGS': {
'max_connections': 2
}
},
},
},
}
if not settings.configured:
settings.configure(**cache_settings)
try:
from django.test.simple import DjangoTestSuiteRunner as TestSuiteRunner
except ImportError:
from django.test.runner import DiscoverRunner as TestSuiteRunner
def runtests(*test_args):
if not test_args:
test_args = ['testapp']
sys.path.insert(0, join(dirname(abspath(__file__)), 'tests'))
try:
django.setup()
except AttributeError:
pass
runner = TestSuiteRunner(verbosity=1, interactive=True, failfast=False)
failures = runner.run_tests(test_args)
sys.exit(failures)
if __name__ == '__main__':
runtests(*sys.argv[1:])
"""
A quick and dirty benchmarking script. GitPython is an optional dependency
which you can use to change branches via the command line.
Usage::
python benchmark.py
python benchmark.py master
    python benchmark.py some-branch
"""
import os
import sys
from time import time
from django.core import cache
from hashlib import sha1 as sha
try:
from git import Repo
except ImportError:
pass
else:
if len(sys.argv) > 1:
repo_path = os.path.dirname(__file__)
repo = Repo(repo_path)
repo.branches[sys.argv[1]].checkout()
print "Testing %s" % repo.active_branch
def h(value):
return sha(str(value)).hexdigest()
class BenchmarkRegistry(type):
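    """
    Metaclass that collects every ``Benchmark`` subclass into a ``benchmarks``
    list so ``run_benchmarks`` can discover and time them automatically.
    """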
def __init__(cls, name, bases, attrs):
if not hasattr(cls, 'benchmarks'):
cls.benchmarks = []
else:
cls.benchmarks.append(cls)
class Benchmark(object):
__metaclass__ = BenchmarkRegistry
def setUp(self):
pass
def tearDown(self):
pass
def timetrial(self):
self.setUp()
start = time()
self.run()
t = time() - start
self.tearDown()
return t
def run(self):
pass
@classmethod
def run_benchmarks(cls):
for benchmark in cls.benchmarks:
benchmark = benchmark()
print benchmark.__doc__
print "Time: %s" % (benchmark.timetrial())
class GetAndSetBenchmark(Benchmark):
"Settings and Getting Mixed"
def setUp(self):
self.cache = cache.get_cache('default')
self.values = {}
for i in range(30000):
self.values[h(i)] = i
self.values[h(h(i))] = h(i)
def run(self):
for k, v in self.values.items():
self.cache.set(k, v)
for k, v in self.values.items():
value = self.cache.get(k)
class IncrBenchmark(Benchmark):
"Incrementing integers"
def setUp(self):
self.cache = cache.get_cache('default')
self.values = {}
self.ints = []
self.strings = []
for i in range(30000):
self.values[h(i)] = i
self.values[h(h(i))] = h(i)
self.ints.append(i)
self.strings.append(h(i))
def run(self):
for i in self.ints:
self.cache.incr(h(i), 100)
class MsetAndMGet(Benchmark):
"Getting and setting many mixed values"
def setUp(self):
self.cache = cache.get_cache('default')
self.values = {}
for i in range(30000):
self.values[h(i)] = i
self.values[h(h(i))] = h(i)
def run(self):
self.cache.set_many(self.values)
value = self.cache.get_many(self.values.keys())
if __name__ == "__main__":
Benchmark.run_benchmarks()
\ No newline at end of file
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "tests.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
......@@ -7,21 +7,28 @@ DATABASES = {
}
INSTALLED_APPS = [
'django_nose',
'tests.testapp',
]
ROOT_URLCONF = 'tests.urls'
SECRET_KEY = "shh...it's a seakret"
CACHES = {
'default': {
'BACKEND': 'redis_cache.RedisCache',
'LOCATION': '127.0.0.1:6379',
'OPTIONS': { # optional
'LOCATION': '127.0.0.1:6381',
'OPTIONS': {
'DB': 15,
'PASSWORD': 'yadayada',
'MAX_CONNECTIONS': 2,
'PARSER_CLASS': 'redis.connection.HiredisParser',
'PICKLE_VERSION': 2,
'CONNECTION_POOL_CLASS': 'redis.ConnectionPool',
'CONNECTION_POOL_CLASS_KWARGS': {
'max_connections': 2,
}
},
},
}
ROOT_URLCONF = 'tests.urls'
SECRET_KEY = 'blabla'
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
# -*- coding: utf-8 -*-
from hashlib import sha1
import time
import unittest
try:
import cPickle as pickle
except ImportError:
import pickle
from django import VERSION
from django.core.cache import get_cache
from django.test import TestCase
from .models import Poll, expensive_calculation
import redis
from redis.connection import UnixDomainSocketConnection
from redis_cache.cache import RedisCache, ImproperlyConfigured, pool
from tests.testapp.models import Poll, expensive_calculation
from redis_cache.cache import RedisCache, pool
from redis_cache.compat import DEFAULT_TIMEOUT
# functions/classes for complex data type tests
......@@ -25,49 +26,28 @@ class C:
return 24
class RedisCacheTests(TestCase):
"""
A common set of tests derived from Django's own cache tests
class BaseRedisTestCase(object):
"""
def setUp(self):
        # use DB 15 for testing and hope there isn't any important data :->
self.reset_pool()
self.cache = self.get_cache()
def tearDown(self):
# Sometimes it will be necessary to skip this method because we need to test default
# initialization and that may be using a different port than the test redis server.
if hasattr(self, '_skip_tearDown') and self._skip_tearDown:
self._skip_tearDown = False
return
self.cache.clear()
def reset_pool(self):
if hasattr(self, 'cache'):
self.cache._client.connection_pool.disconnect()
for client in self.cache.clients.itervalues():
client.connection_pool.disconnect()
def get_cache(self, backend=None):
if VERSION[0] == 1 and VERSION[1] < 3:
cache = get_cache(backend or 'redis_cache.cache://127.0.0.1:6379?db=15')
elif VERSION[0] == 1 and VERSION[1] >= 3 and VERSION[1] <= 7:
cache = get_cache(backend or 'default')
else:
cache = get_cache(backend or 'redis_cache.cache.CacheClass', LOCATION='127.0.0.1:6379')
return cache
def test_bad_db_initialization(self):
self.assertRaises(ImproperlyConfigured, self.get_cache, 'redis_cache.cache://127.0.0.1:6379?db=not_a_number')
def test_bad_port_initialization(self):
self.assertRaises(ImproperlyConfigured, self.get_cache, 'redis_cache.cache://127.0.0.1:not_a_number?db=15')
def test_default_initialization(self):
self.reset_pool()
if VERSION[0] == 1 and VERSION[1] < 3:
self.cache = self.get_cache('redis_cache.cache://')
elif VERSION[0] == 1 and VERSION[1] >= 3:
self.cache = self.get_cache('redis_cache.cache.CacheClass')
connection_class = self.cache._client.connection_pool.connection_class
if connection_class is not UnixDomainSocketConnection:
self.assertEqual(self.cache._client.connection_pool.connection_kwargs['host'], '127.0.0.1')
self.assertEqual(self.cache._client.connection_pool.connection_kwargs['port'], 6379)
self.assertEqual(self.cache._client.connection_pool.connection_kwargs['db'], 1)
return get_cache(backend or 'default')
def test_simple(self):
# Simple cache set/get works
......@@ -78,13 +58,13 @@ class RedisCacheTests(TestCase):
# A key can be added to a cache
self.cache.add("addkey1", "value")
result = self.cache.add("addkey1", "newvalue")
self.assertEqual(result, False)
self.assertFalse(result)
self.assertEqual(self.cache.get("addkey1"), "value")
def test_non_existent(self):
# Non-existent cache keys return as None/default
# get with non-existent keys
self.assertEqual(self.cache.get("does_not_exist"), None)
self.assertIsNone(self.cache.get("does_not_exist"))
self.assertEqual(self.cache.get("does_not_exist", "bang!"), "bang!")
def test_get_many(self):
......@@ -93,15 +73,13 @@ class RedisCacheTests(TestCase):
self.cache.set('b', 'b')
self.cache.set('c', 'c')
self.cache.set('d', 'd')
self.assertEqual(self.cache.get_many(['a', 'c', 'd']), {'a' : 'a', 'c' : 'c', 'd' : 'd'})
self.assertEqual(self.cache.get_many(['a', 'b', 'e']), {'a' : 'a', 'b' : 'b'})
self.assertEqual(self.cache.get_many(['a', 'c', 'd']), {'a': 'a', 'c': 'c', 'd': 'd'})
self.assertEqual(self.cache.get_many(['a', 'b', 'e']), {'a': 'a', 'b': 'b'})
def test_get_many_with_manual_integer_insertion(self):
keys = ['a', 'b', 'c', 'd']
cache_keys = map(self.cache.make_key, keys)
# manually set integers and then get_many
for i, key in enumerate(cache_keys):
self.cache._client.set(key, i)
for i, key in enumerate(keys):
self.cache.set(key, i)
self.assertEqual(self.cache.get_many(keys), {'a': 0, 'b': 1, 'c': 2, 'd': 3})
def test_get_many_with_automatic_integer_insertion(self):
......@@ -116,20 +94,20 @@ class RedisCacheTests(TestCase):
self.cache.set("key2", "eggs")
self.assertEqual(self.cache.get("key1"), "spam")
self.cache.delete("key1")
self.assertEqual(self.cache.get("key1"), None)
self.assertIsNone(self.cache.get("key1"))
self.assertEqual(self.cache.get("key2"), "eggs")
def test_has_key(self):
# The cache can be inspected for cache keys
self.cache.set("hello1", "goodbye1")
self.assertEqual(self.cache.has_key("hello1"), True)
self.assertEqual(self.cache.has_key("goodbye1"), False)
self.assertIn("hello1", self.cache)
self.assertNotIn("goodbye1", self.cache)
def test_in(self):
        # The in operator can be used to inspect cache contents
self.cache.set("hello2", "goodbye2")
self.assertEqual("hello2" in self.cache, True)
self.assertEqual("goodbye2" in self.cache, False)
self.assertIn("hello2", self.cache)
self.assertNotIn("goodbye2", self.cache)
def test_incr(self):
# Cache values can be incremented
......@@ -153,13 +131,13 @@ class RedisCacheTests(TestCase):
def test_data_types(self):
# Many different data types can be cached
stuff = {
'string' : 'this is a string',
'int' : 42,
'list' : [1, 2, 3, 4],
'tuple' : (1, 2, 3, 4),
'dict' : {'A': 1, 'B' : 2},
'function' : f,
'class' : C,
'string': 'this is a string',
'int': 42,
'list': [1, 2, 3, 4],
'tuple': (1, 2, 3, 4),
'dict': {'A': 1, 'B': 2},
'function': f,
'class': C,
}
self.cache.set("stuff", stuff)
self.assertEqual(self.cache.get("stuff"), stuff)
......@@ -216,27 +194,25 @@ class RedisCacheTests(TestCase):
self.cache.add("expire2", "newvalue")
self.assertEqual(self.cache.get("expire2"), "newvalue")
self.assertEqual(self.cache.has_key("expire3"), False)
self.assertEqual("expire3" in self.cache, False)
def test_set_expiration_default_timeout(self):
self.cache.set('a', 'a')
self.assertTrue(self.cache._client.ttl(self.cache.make_key('a')) > 0)
def test_set_expiration_no_timeout(self):
self.cache.set('a', 'a', timeout=None)
self.assertTrue(self.cache._client.ttl(self.cache.make_key('a')) is None)
@unittest.skipIf(DEFAULT_TIMEOUT is None, "Version of django doesn't support indefinite timeouts.")
def test_set_expiration_timeout_None(self):
key, value = 'key', 'value'
self.cache.set(key, value, timeout=None)
self.assertIsNone(self.cache.ttl(key))
def test_set_expiration_timeout_zero(self):
key, value = self.cache.make_key('key'), 'value'
self.cache.set(key, value, timeout=0)
self.assertTrue(self.cache._client.ttl(key) is None)
self.assertTrue(self.cache.has_key(key))
self.assertIsNone(self.cache.get_client(key).ttl(key))
self.assertIn(key, self.cache)
def test_set_expiration_timeout_negative(self):
key, value = self.cache.make_key('key'), 'value'
self.cache.set(key, value, timeout=-1)
self.assertTrue(self.cache._client.ttl(key) is None)
self.assertFalse(self.cache.has_key(key))
self.assertIsNone(self.cache.get_client(key).ttl(key))
self.assertNotIn(key, self.cache)
def test_unicode(self):
# Unicode values can be cached
......@@ -244,7 +220,7 @@ class RedisCacheTests(TestCase):
u'ascii': u'ascii_value',
u'unicode_ascii': u'Iñtërnâtiônàlizætiøn1',
u'Iñtërnâtiônàlizætiøn': u'Iñtërnâtiônàlizætiøn2',
u'ascii': {u'x' : 1 }
u'ascii': {u'x': 1}
}
for (key, value) in stuff.items():
self.cache.set(key, value)
......@@ -270,8 +246,8 @@ class RedisCacheTests(TestCase):
# set_many takes a second ``timeout`` parameter
self.cache.set_many({"key1": "spam", "key2": "eggs"}, 1)
time.sleep(2)
self.assertEqual(self.cache.get("key1"), None)
self.assertEqual(self.cache.get("key2"), None)
self.assertIsNone(self.cache.get("key1"))
self.assertIsNone(self.cache.get("key2"))
def test_delete_many(self):
# Multiple keys can be deleted using delete_many
......@@ -279,8 +255,8 @@ class RedisCacheTests(TestCase):
self.cache.set("key2", "eggs")
self.cache.set("key3", "ham")
self.cache.delete_many(["key1", "key2"])
self.assertEqual(self.cache.get("key1"), None)
self.assertEqual(self.cache.get("key2"), None)
self.assertIsNone(self.cache.get("key1"))
self.assertIsNone(self.cache.get("key2"))
self.assertEqual(self.cache.get("key3"), "ham")
def test_clear(self):
......@@ -288,54 +264,35 @@ class RedisCacheTests(TestCase):
self.cache.set("key1", "spam")
self.cache.set("key2", "eggs")
self.cache.clear()
self.assertEqual(self.cache.get("key1"), None)
self.assertEqual(self.cache.get("key2"), None)
self.assertIsNone(self.cache.get("key1"))
self.assertIsNone(self.cache.get("key2"))
def test_long_timeout(self):
'''
Using a timeout greater than 30 days makes memcached think
"""Using a timeout greater than 30 days makes memcached think
it is an absolute expiration timestamp instead of a relative
offset. Test that we honour this convention. Refs #12399.
'''
self.cache.set('key1', 'eggs', 60*60*24*30 + 1) #30 days + 1 second
"""
self.cache.set('key1', 'eggs', 60 * 60 * 24 * 30 + 1) # 30 days + 1 second
self.assertEqual(self.cache.get('key1'), 'eggs')
self.cache.add('key2', 'ham', 60*60*24*30 + 1)
self.cache.add('key2', 'ham', 60 * 60 * 24 * 30 + 1)
self.assertEqual(self.cache.get('key2'), 'ham')
self.cache.set_many({'key3': 'sausage', 'key4': 'lobster bisque'}, 60*60*24*30 + 1)
self.cache.set_many({'key3': 'sausage', 'key4': 'lobster bisque'}, 60 * 60 * 24 * 30 + 1)
self.assertEqual(self.cache.get('key3'), 'sausage')
self.assertEqual(self.cache.get('key4'), 'lobster bisque')
def test_incr_version(self):
if isinstance(self.cache, RedisCache):
old_key = "key1"
self.cache.set(old_key, "spam", version=1)
self.assertEqual(self.cache.make_key(old_key), ':1:key1')
new_version = self.cache.incr_version(old_key, 1)
key = "key1"
self.cache.set(key, "spam", version=1)
self.assertEqual(self.cache.make_key(key), ':1:key1')
new_version = self.cache.incr_version(key, 1)
self.assertEqual(new_version, 2)
new_key = self.cache.make_key(old_key, version=new_version)
new_key = self.cache.make_key(key, version=new_version)
self.assertEqual(new_key, ':2:key1')
self.assertEqual(self.cache.get(old_key), None)
self.assertEqual(self.cache.get(new_key), 'spam')
def test_incr_with_pickled_integer(self):
"Testing case where there exists a pickled integer and we increment it"
number = 42
key = self.cache.make_key("key")
# manually set value using the redis client
self.cache._client.set(key, pickle.dumps(number))
new_value = self.cache.incr(key)
self.assertEqual(new_value, number + 1)
# Test that the pickled value was converted to an integer
value = int(self.cache._client.get(key))
self.assertTrue(isinstance(value, int))
# now that the value is an integer, let's increment it again.
new_value = self.cache.incr(key, 7)
self.assertEqual(new_value, number + 8)
self.assertIsNone(self.cache.get(key, version=1))
self.assertEqual(self.cache.get(key, version=2), 'spam')
def test_pickling_cache_object(self):
p = pickle.dumps(self.cache)
......@@ -343,7 +300,7 @@ class RedisCacheTests(TestCase):
# Now let's do a simple operation using the unpickled cache object
cache.add("addkey1", "value")
result = cache.add("addkey1", "newvalue")
self.assertEqual(result, False)
self.assertFalse(result)
self.assertEqual(cache.get("addkey1"), "value")
def test_float_caching(self):
......@@ -356,24 +313,100 @@ class RedisCacheTests(TestCase):
a = self.cache.get('a')
self.assertEqual(a, '1.1')
def test_multiple_connection_pool_connections(self):
pool._connection_pools = {}
get_cache('redis_cache.cache.CacheClass', LOCATION='127.0.0.1:6379', OPTIONS={'DB': 15})
self.assertEqual(len(pool._connection_pools), 1)
get_cache('redis_cache.cache.CacheClass', LOCATION='127.0.0.1:6379', OPTIONS={'DB': 14})
self.assertEqual(len(pool._connection_pools), 2)
get_cache('redis_cache.cache.CacheClass', LOCATION='127.0.0.1:6379', OPTIONS={'DB': 15})
self.assertEqual(len(pool._connection_pools), 2)
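# Pools are cached per (host, port, db) combination: requesting DB 15 a
# second time reuses the existing pool, while DB 14 creates a new one.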
def test_setting_string_integer_retrieves_string(self):
self.assertTrue(self.cache.set("foo", "1"))
self.assertEqual(self.cache.get("foo"), "1")
def test_setting_bool_retrieves_bool(self):
self.assertTrue(self.cache.set("bool_t", True))
self.assertEqual(self.cache.get("bool_t"), True)
self.assertTrue(self.cache.get("bool_t"))
self.assertTrue(self.cache.set("bool_f", False))
self.assertEqual(self.cache.get("bool_f"), False)
self.assertFalse(self.cache.get("bool_f"))
def test_delete_pattern(self):
data = {
'a': 'a',
'b': 'b',
'aa': 'aa',
'bb': 'bb',
'aaa': 'aaa',
'bbb': 'bbb',
}
self.cache.set_many(data)
self.cache.delete_pattern('aa*')
items = self.cache.get_many(data.keys())
self.assertEqual(len(items), 4)
self.cache.delete_pattern('b?b')
items = self.cache.get_many(data.keys())
self.assertEqual(len(items), 3)
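# ``delete_pattern`` accepts Redis glob patterns: ``*`` matches any run
# of characters and ``?`` matches exactly one. Above, 'aa*' deletes
# 'aa' and 'aaa' (4 keys remain), then 'b?b' deletes only 'bbb' (3 remain).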
def test_clearing_using_version(self):
self.cache.set('a', 'a', version=1)
self.cache.set('b', 'b', version=1)
self.cache.set('a', 'a', version=2)
self.cache.set('b', 'b', version=2)
values = self.cache.get_many(['a', 'b'], version=1)
self.assertEqual(len(values), 2)
values = self.cache.get_many(['a', 'b'], version=2)
self.assertEqual(len(values), 2)
self.cache.clear(version=2)
values = self.cache.get_many(['a', 'b'], version=1)
self.assertEqual(len(values), 2)
values = self.cache.get_many(['a', 'b'], version=2)
self.assertEqual(len(values), 0)
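# ``clear(version=2)`` flushes only the keys namespaced under version 2
# (stored as ':2:<key>'), leaving the version-1 namespace intact. A
# minimal usage sketch, assuming a configured ``cache``:
#
#     cache.set('greeting', 'hello', version=1)
#     cache.set('greeting', 'bonjour', version=2)
#     cache.clear(version=2)
#     cache.get('greeting', version=1)  # still 'hello'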
def test_reinsert_keys(self):
self.cache._pickle_version = 0
for i in range(2000):
s = sha1(str(i)).hexdigest()
self.cache.set(s, self.cache)
self.cache._pickle_version = -1
self.cache.reinsert_keys()
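# A hedged reading of this test: ``reinsert_keys`` re-serializes every
# stored value with the cache's current pickle protocol, so a deployment
# can migrate existing data after raising PICKLE_VERSION (here from
# protocol 0 to -1, the highest protocol available).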
def test_ttl_of_reinsert_keys(self):
self.cache.set('a', 'a', 5)
self.assertEqual(self.cache.get('a'), 'a')
self.cache.set('b', 'b', 5)
self.cache.reinsert_keys()
self.assertEqual(self.cache.get('a'), 'a')
self.assertGreater(self.cache.get_client('a').ttl(self.cache.make_key('a')), 1)
self.assertEqual(self.cache.get('b'), 'b')
self.assertGreater(self.cache.get_client('b').ttl(self.cache.make_key('b')), 1)
def test_get_or_set(self):
def expensive_function():
expensive_function.num_calls += 1
return 42
expensive_function.num_calls = 0
self.assertEqual(expensive_function.num_calls, 0)
value = self.cache.get_or_set('a', expensive_function, 1)
self.assertEqual(expensive_function.num_calls, 1)
self.assertEqual(value, 42)
value = self.cache.get_or_set('a', expensive_function, 1)
self.assertEqual(expensive_function.num_calls, 1)
self.assertEqual(value, 42)
value = self.cache.get_or_set('a', expensive_function, 1)
self.assertEqual(expensive_function.num_calls, 1)
self.assertEqual(value, 42)
time.sleep(2)
value = self.cache.get_or_set('a', expensive_function, 1)
self.assertEqual(expensive_function.num_calls, 2)
self.assertEqual(value, 42)
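# ``get_or_set`` invokes the callable only on a cache miss and caches
# the result for the given timeout; until the entry expires, repeated
# calls return the cached value without re-running the function. A
# minimal sketch, assuming a configured ``cache`` (``load_report`` is a
# hypothetical helper):
#
#     def load_report():
#         return run_expensive_query()
#
#     report = cache.get_or_set('report', load_report, 300)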
def assertMaxConnection(self, cache, max_num):
for client in cache.clients.itervalues():
self.assertLessEqual(client.connection_pool._created_connections, max_num)
def test_max_connections(self):
pool._connection_pools = {}
......@@ -382,52 +415,58 @@ class RedisCacheTests(TestCase):
def noop(*args, **kwargs):
pass
release = cache._client.connection_pool.release
cache._client.connection_pool.release = noop
releases = {}
for client in cache.clients.itervalues():
releases[client.connection_pool] = client.connection_pool.release
client.connection_pool.release = noop
self.assertEqual(client.connection_pool.max_connections, 2)
cache.set('a', 'a')
self.assertMaxConnection(cache, 1)
cache.set('a', 'a')
self.assertMaxConnection(cache, 2)
with self.assertRaises(redis.ConnectionError):
cache.set('a', 'a')
cache._client.connection_pool.release = release
cache._client.connection_pool.max_connections = 2**31
self.assertMaxConnection(cache, 2)
for client in cache.clients.itervalues():
client.connection_pool.release = releases[client.connection_pool]
client.connection_pool.max_connections = 2 ** 31
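# With ``release`` stubbed to a no-op, every ``set`` permanently claims
# a connection, so a pool capped at max_connections=2 (via
# CONNECTION_POOL_CLASS_KWARGS) serves two calls and raises
# redis.ConnectionError on the third.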
def test_has_key_with_no_key(self):
self.assertFalse(self.cache.has_key('does_not_exist'))
def test_has_key_with_key(self):
self.cache.set('a', 'a')
self.assertTrue(self.cache.has_key('a'))
def test_ttl_set_expiry(self):
self.cache.set('a', 'a', 10)
ttl = self.cache.ttl('a')
self.assertAlmostEqual(ttl, 10)
@unittest.skipIf(DEFAULT_TIMEOUT is None, "This version of Django doesn't support indefinite timeouts.")
def test_ttl_no_expiry(self):
self.cache.set('a', 'a', timeout=None)
ttl = self.cache.ttl('a')
self.assertTrue(ttl is None)
self.assertIsNone(ttl)
def test_ttl_past_expiry(self):
self.cache.set('a', 'a', timeout=1)
ttl = self.cache.ttl('a')
self.assertAlmostEqual(ttl, 1)
time.sleep(2)
time.sleep(1.1)
ttl = self.cache.ttl('a')
self.assertEqual(ttl, 0)
def test_non_existent_key(self):
""" Non-existent keys are semantically the same as keys that have
"""Non-existent keys are semantically the same as keys that have
expired.
"""
ttl = self.cache.ttl('does_not_exist')
self.assertEqual(ttl, 0)
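# Taken together, the ttl tests pin down the contract of ``cache.ttl``:
# a positive number of seconds for a volatile key, None for a key with
# no expiry, and 0 for a key that is missing or already expired.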
def test_has_key_with_no_key(self):
self.assertFalse(self.cache.has_key('does_not_exist'))
def test_has_key_with_key(self):
self.cache.set('a', 'a')
self.assertTrue(self.cache.has_key('a'))
if __name__ == '__main__':
import unittest
unittest.main()
from math import sqrt
def mean(lst):
return sum(lst) / float(len(lst))  # float() guards against Python 2 integer division
def stddev(lst):
"""returns the standard deviation of lst"""
avg = mean(lst)
variance = sum((i - avg) ** 2 for i in lst) / len(lst)  # divide by N for the population variance
return sqrt(variance)
class MultiServerTests(object):
def test_key_distribution(self):
n = 10000
for i in xrange(n):
self.cache.set(i, i)
keys = [len(client.keys('*')) for client in self.cache.clients.itervalues()]
self.assertTrue(((stddev(keys) / n) * 100.0) < 10)
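# The check above is a rough evenness test for the sharder: with 10,000
# keys spread across the clients, the standard deviation of the
# per-client key counts must stay below 10% of the total key count.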
def test_removing_nodes(self):
c1, c2, c3 = self.cache.clients.keys()
replicas = self.cache.sharder.replicas
self.assertEqual(len(self.cache.sharder._nodes), 3 * replicas)
self.cache.sharder.remove(c1)
self.assertEqual(len(self.cache.sharder._nodes), 2 * replicas)
self.cache.sharder.remove(c2)
self.assertEqual(len(self.cache.sharder._nodes), 1 * replicas)
self.cache.sharder.remove(c3)
self.assertEqual(len(self.cache.sharder._nodes), 0)
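# Each node occupies ``replicas`` points on the consistent-hash ring,
# so three nodes yield 3 * replicas points and removing a node always
# frees exactly ``replicas`` of them.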
# -*- coding: utf-8 -*-
from tests.testapp.tests.base_tests import BaseRedisTestCase
from tests.testapp.tests.multi_server_tests import MultiServerTests
try:
from django.test import override_settings
except ImportError:
from django.test.utils import override_settings
from django.test import TestCase
from redis_cache.cache import ImproperlyConfigured
from redis.connection import UnixDomainSocketConnection
LOCATION = "unix://:yadayada@/tmp/redis4.sock?db=15"
LOCATIONS = [
"unix://:yadayada@/tmp/redis4.sock?db=15",
"unix://:yadayada@/tmp/redis5.sock?db=15",
"unix://:yadayada@/tmp/redis6.sock?db=15",
]
class SocketTestCase(BaseRedisTestCase, TestCase):
pass
@override_settings(
CACHES={
'default': {
'BACKEND': 'redis_cache.RedisCache',
'LOCATION': LOCATION,
'OPTIONS': {
'DB': 15,
'PASSWORD': 'yadayada',
'PARSER_CLASS': 'redis.connection.HiredisParser',
'PICKLE_VERSION': 2,
'CONNECTION_POOL_CLASS': 'redis.ConnectionPool',
'CONNECTION_POOL_CLASS_KWARGS': {
'max_connections': 2,
}
},
},
}
)
class SingleHiredisTestCase(SocketTestCase):
pass
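# The same OPTIONS block is exercised below in every parser/pool
# combination. PARSER_CLASS selects redis-py's C-backed HiredisParser
# or its pure-Python PythonParser, and CONNECTION_POOL_CLASS_KWARGS
# appears to be passed through to the pool constructor, which is how
# these suites cap each pool at two connections.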
@override_settings(
CACHES={
'default': {
'BACKEND': 'redis_cache.RedisCache',
'LOCATION': LOCATION,
'OPTIONS': {
'DB': 15,
'PASSWORD': 'yadayada',
'PARSER_CLASS': 'redis.connection.PythonParser',
'PICKLE_VERSION': 2,
'CONNECTION_POOL_CLASS': 'redis.ConnectionPool',
'CONNECTION_POOL_CLASS_KWARGS': {
'max_connections': 2,
}
},
},
}
)
class SinglePythonParserTestCase(SocketTestCase):
pass
@override_settings(
CACHES={
'default': {
'BACKEND': 'redis_cache.ShardedRedisCache',
'LOCATION': LOCATIONS,
'OPTIONS': {
'DB': 15,
'PASSWORD': 'yadayada',
'PARSER_CLASS': 'redis.connection.HiredisParser',
'PICKLE_VERSION': 2,
'CONNECTION_POOL_CLASS': 'redis.ConnectionPool',
'CONNECTION_POOL_CLASS_KWARGS': {
'max_connections': 2,
}
},
},
}
)
class MultipleHiredisTestCase(MultiServerTests, SocketTestCase):
pass
@override_settings(
CACHES={
'default': {
'BACKEND': 'redis_cache.ShardedRedisCache',
'LOCATION': LOCATIONS,
'OPTIONS': {
'DB': 15,
'PASSWORD': 'yadayada',
'PARSER_CLASS': 'redis.connection.PythonParser',
'PICKLE_VERSION': 2,
'CONNECTION_POOL_CLASS': 'redis.ConnectionPool',
'CONNECTION_POOL_CLASS_KWARGS': {
'max_connections': 2,
}
},
},
}
)
class MultiplePythonParserTestCase(MultiServerTests, SocketTestCase):
pass
# -*- coding: utf-8 -*-
from tests.testapp.tests.base_tests import BaseRedisTestCase
from tests.testapp.tests.multi_server_tests import MultiServerTests
try:
from django.test import override_settings
except ImportError:
from django.test.utils import override_settings
from django.test import TestCase
from redis_cache.cache import ImproperlyConfigured
from redis.connection import UnixDomainSocketConnection
LOCATION = "127.0.0.1:6381"
LOCATIONS = [
'127.0.0.1:6381',
'127.0.0.1:6382',
'127.0.0.1:6383',
]
class TCPTestCase(BaseRedisTestCase, TestCase):
def test_default_initialization(self):
self.reset_pool()
self.cache = self.get_cache()
client = self.cache.clients[('127.0.0.1', 6381, 15, None)]
connection_class = client.connection_pool.connection_class
if connection_class is not UnixDomainSocketConnection:
self.assertEqual(client.connection_pool.connection_kwargs['host'], '127.0.0.1')
self.assertEqual(client.connection_pool.connection_kwargs['port'], 6381)
self._skip_tearDown = True
self.assertEqual(client.connection_pool.connection_kwargs['db'], 15)
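# Clients are keyed by (host, port, db, unix_socket_path) tuples, so a
# plain TCP client leaves the socket-path slot as None.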
@override_settings(
CACHES={
'default': {
'BACKEND': 'redis_cache.RedisCache',
'LOCATION': LOCATION,
'OPTIONS': {
'DB': 15,
'PASSWORD': 'yadayada',
'PARSER_CLASS': 'redis.connection.HiredisParser',
'PICKLE_VERSION': 2,
'CONNECTION_POOL_CLASS': 'redis.ConnectionPool',
'CONNECTION_POOL_CLASS_KWARGS': {
'max_connections': 2,
}
},
},
}
)
class SingleHiredisTestCase(TCPTestCase):
pass
@override_settings(
CACHES={
'default': {
'BACKEND': 'redis_cache.RedisCache',
'LOCATION': LOCATION,
'OPTIONS': {
'DB': 15,
'PASSWORD': 'yadayada',
'PARSER_CLASS': 'redis.connection.PythonParser',
'PICKLE_VERSION': 2,
'CONNECTION_POOL_CLASS': 'redis.ConnectionPool',
'CONNECTION_POOL_CLASS_KWARGS': {
'max_connections': 2,
}
},
},
}
)
class SinglePythonParserTestCase(TCPTestCase):
pass
@override_settings(
CACHES={
'default': {
'BACKEND': 'redis_cache.ShardedRedisCache',
'LOCATION': LOCATIONS,
'OPTIONS': {
'DB': 15,
'PASSWORD': 'yadayada',
'PARSER_CLASS': 'redis.connection.HiredisParser',
'PICKLE_VERSION': 2,
'CONNECTION_POOL_CLASS': 'redis.ConnectionPool',
'CONNECTION_POOL_CLASS_KWARGS': {
'max_connections': 2,
}
},
},
}
)
class MultipleHiredisTestCase(MultiServerTests, TCPTestCase):
pass
@override_settings(
CACHES={
'default': {
'BACKEND': 'redis_cache.ShardedRedisCache',
'LOCATION': LOCATIONS,
'OPTIONS': {
'DB': 15,
'PASSWORD': 'yadayada',
'PARSER_CLASS': 'redis.connection.PythonParser',
'PICKLE_VERSION': 2,
'CONNECTION_POOL_CLASS': 'redis.ConnectionPool',
'CONNECTION_POOL_CLASS_KWARGS': {
'max_connections': 2,
}
},
},
}
)
class MultiplePythonParserTestCase(MultiServerTests, TCPTestCase):
pass