Commit b94eea68 authored by Sean Bleier's avatar Sean Bleier

Merging version 1.0 into master.

parent 9bf25f77
......@@ -13,4 +13,9 @@ development_settings.py
.pydevproject
.settings
dist/*
dump.rdb
dist/*
MANIFEST
.venv
redis/
*/_build/
language: python
python:
- "2.6"
- "2.7"
- "3.2"
- "3.3"
- "3.4"
env:
- DJANGO_VERSION=1.5
- DJANGO_VERSION=1.6
- DJANGO_VERSION=1.7
- DJANGO_VERSION=1.8
# command to run tests
script: make test DJANGO_VERSION=$DJANGO_VERSION
branches:
only:
- unstable
- master
SHELL := /bin/bash
PACKAGE_NAME=redis_cache
VENV_DIR?=.venv
VENV_ACTIVATE=$(VENV_DIR)/bin/activate
WITH_VENV=. $(VENV_ACTIVATE);
DJANGO_VERSION?=1.7
default:
python setup.py check build
$(VENV_ACTIVATE): requirements*.txt
test -f $@ || virtualenv --python=python2.7 --system-site-packages $(VENV_DIR)
touch $@
.PHONY: install_requirements
install_requirements: requirements*.txt
$(WITH_VENV) pip install --no-deps -r requirements.txt
$(WITH_VENV) pip install --no-deps -r requirements-dev.txt
$(WITH_VENV) pip install Django==$(DJANGO_VERSION)
.PHONY: venv
venv: $(VENV_ACTIVATE)
.PHONY: setup
setup: venv
.PHONY: redis_servers
redis_servers:
test -d redis || git clone https://github.com/antirez/redis
git -C redis checkout 2.6
make -C redis
for i in 1 2 3; do \
./redis/src/redis-server \
--pidfile /tmp/redis`echo $$i`.pid \
--requirepass yadayada \
--daemonize yes \
--port `echo 638$$i` ; \
done
for i in 4 5 6; do \
./redis/src/redis-server \
--pidfile /tmp/redis`echo $$i`.pid \
--requirepass yadayada \
--daemonize yes \
--port 0 \
--unixsocket /tmp/redis`echo $$i`.sock \
--unixsocketperm 755 ; \
done
.PHONY: clean
clean:
python setup.py clean
rm -rf build/
rm -rf dist/
rm -rf *.egg*/
rm -rf __pycache__/
rm -f MANIFEST
rm -f test.db
find $(PACKAGE_NAME) -type f -name '*.pyc' -delete
.PHONY: teardown
teardown:
rm -rf $(VENV_DIR)/
.PHONY: test
test: venv install_requirements redis_servers
$(WITH_VENV) PYTHONPATH=$(PYTHONPATH): django-admin.py test --settings=tests.settings -s
for i in 1 2 3 4 5 6; do kill `cat /tmp/redis$$i.pid`; done;
.PHONY: shell
shell: venv
$(WITH_VENV) PYTHONPATH=$(PYTHONPATH): django-admin.py shell --settings=tests.settings
......@@ -2,12 +2,33 @@
Redis Django Cache Backend
==========================
A cache backend for Django using the Redis datastructure server.
.. image:: https://pypip.in/download/django-redis-cache/badge.svg
:target: https://pypi.python.org/pypi/django-redis-cache/
:alt: Downloads
.. image:: https://pypip.in/version/django-redis-cache/badge.svg
:target: https://pypi.python.org/pypi/django-redis-cache/
:alt: Latest Version
A Redis cache backend for Django
Changelog
=========
=======
1.0.0
-----
* Deprecate support for django < 1.3 and redis < 2.4. If you need support for those versions,
pin django-redis-cache to a version less than 1.0, i.e. pip install django-redis-cache<1.0
* Application level sharding when a list of locations is provided in the settings.
* Delete keys using wildcard syntax.
* Clear cache using version to delete only keys under that namespace.
* Ability to select pickle protocol version.
* Support for Master-Slave setup
* Thundering herd protection
0.13.0
------
......@@ -23,18 +44,13 @@ Changelog
* Adds `ttl` method to the cache. `cache.ttl(key)` will return the number of
seconds before it expires or None if the key is not volatile.
0.11.1
------
* Allows user to specify the connection pool class kwargs, e.g. timeout,
max_connections, etc.
0.11.0
------
* Adds support for specifying the connection pool class.
* Adds ability to set the max connections for the connection pool.
0.10.0
------
......@@ -56,20 +72,14 @@ when parsing messages from the redis server. redis-py will pick the best
parser for you implicitly, but using the ``PARSER_CLASS`` setting gives you
control and the option to roll your own parser class if you are so bold.
Notes
-----
This cache backend requires the `redis-py`_ Python client library for
communicating with the Redis server.
Redis writes to disk asynchronously so there is a slight chance
of losing some data, but for most purposes this is acceptable.
Requirements
============
In order to use ``redis.connection.HiredisParser`` parser class, you need to
pip install `hiredis`_. This is the recommended parser class.
Usage
-----
`redis-py`_ >= 2.4.5
`redis`_ >= 2.4
`hiredis`_
`python`_ >= 2.5
1. Run ``pip install django-redis-cache``.
......@@ -81,7 +91,11 @@ Usage
CACHES = {
'default': {
'BACKEND': 'redis_cache.RedisCache',
'LOCATION': '<host>:<port>',
'LOCATION': [
'<host>:<port>',
'<host>:<port>',
'<host>:<port>',
],
'OPTIONS': {
'DB': 1,
'PASSWORD': 'yadayada',
......@@ -91,6 +105,8 @@ Usage
'max_connections': 50,
'timeout': 20,
}
'MAX_CONNECTIONS': 1000,
'PICKLE_VERSION': -1,
},
},
}
......@@ -105,12 +121,102 @@ Usage
'OPTIONS': {
'DB': 1,
'PASSWORD': 'yadayada',
'PARSER_CLASS': 'redis.connection.HiredisParser'
'PARSER_CLASS': 'redis.connection.HiredisParser',
'PICKLE_VERSION': 2,
},
},
}
.. _redis: http://redis.io
.. _redis-py: http://github.com/andymccurdy/redis-py/
.. _hiredis: https://github.com/pietern/hiredis-py
# For Master-Slave Setup, specify the host:port of the master
# redis-server instance.
CACHES = {
'default': {
'BACKEND': 'redis_cache.RedisCache',
'LOCATION': [
'<host>:<port>',
'<host>:<port>',
'<host>:<port>',
],
'OPTIONS': {
'DB': 1,
'PASSWORD': 'yadayada',
'PARSER_CLASS': 'redis.connection.HiredisParser',
'PICKLE_VERSION': 2,
'MASTER_CACHE': '<master host>:<master port>',
},
},
}
Usage
=====
django-redis-cache shares the same API as django's built-in cache backends,
with a few exceptions.
``cache.delete_pattern``
Delete keys using glob-style pattern.
example::
>>> from news.models import Story
>>>
>>> most_viewed = Story.objects.most_viewed()
>>> highest_rated = Story.objects.highest_rated()
>>> cache.set('news.stories.most_viewed', most_viewed)
>>> cache.set('news.stories.highest_rated', highest_rated)
>>> data = cache.get_many(['news.stories.highest_rated', 'news.stories.most_viewed'])
>>> len(data)
2
>>> cache.delete_pattern('news.stories.*')
>>> data = cache.get_many(['news.stories.highest_rated', 'news.stories.most_viewed'])
>>> len(data)
0
``cache.clear``
Same as django's ``cache.clear``, except that you can optionally specify a
version and all keys with that version will be deleted. If no version is
provided, all keys are flushed from the cache.
``cache.reinsert_keys``
This helper method retrieves all keys and inserts them back into the cache. This
is useful when changing the pickle protocol number of all the cache entries.
As of django-redis-cache < 1.0, all cache entries were pickled using version 0.
To reduce the memory footprint of the redis-server, simply run this method to
upgrade cache entries to the latest protocol.
Thundering Herd Protection
==========================
A common problem with caching is that you can sometimes get into a situation
where you have a value that takes a long time to compute or retrieve, but have
clients accessing it a lot. For example, if you wanted to retrieve the latest
tweets from the twitter api, you probably want to cache the response for a number
of minutes so you don't exceed your rate limit. However, when the cache entry
expires you can have multiple clients that see there is no entry and try to
simultaneously fetch the latest results from the api.
The way to get around this problem is to pass in a callable and timeout to
``get_or_set``, which will check the cache to see if you need to compute the
value. If it does, then the cache sets a placeholder that tells future clients
to serve data from the stale cache until the new value is created.
Example::
tweets = cache.get_or_set('tweets', twitter.get_newest, timeout=300)
Running Tests
=============
``make test``
.. _redis-py: http://github.com/andymccurdy/redis-py/
.. _redis: http://github.com/antirez/redis/
.. _hiredis: http://github.com/antirez/hiredis/
.. _python: http://python.org
from redis_cache.cache import RedisCache
from redis_cache.backends.single import RedisCache
from redis_cache.backends.multiple import ShardedRedisCache
This diff is collapsed.
from collections import defaultdict

from django.core.exceptions import ImproperlyConfigured

from redis_cache.backends.base import BaseRedisCache
from redis_cache.compat import DEFAULT_TIMEOUT, smart_text
from redis_cache.sharder import HashRing
class ShardedRedisCache(BaseRedisCache):
    """
    Cache backend that shards keys across several redis-server instances
    using a consistent hash ring (``HashRing``).

    When the ``MASTER_CACHE`` option is configured, all writes are pinned
    to that single master instance (Master-Slave setup); reads continue to
    be distributed by the sharder.
    """

    def __init__(self, server, params):
        """
        ``server`` is a single location string or a list/tuple of them.
        One client is created per location and registered on the hash
        ring, keyed by its pool's connection identifier.
        """
        super(ShardedRedisCache, self).__init__(server, params)
        self._params = params
        self._server = server
        self._pickle_version = None
        self.clients = {}
        self.sharder = HashRing()

        if not isinstance(server, (list, tuple)):
            servers = [server]
        else:
            servers = server

        for server in servers:
            client = self.create_client(server)
            self.clients[client.connection_pool.connection_identifier] = client
            self.sharder.add(client.connection_pool.connection_identifier)

    @property
    def master_client(self):
        """
        Lazily resolve and cache the client matching the ``MASTER_CACHE``
        option (``<host>:<port>``); ``None`` when no master is configured.

        Raises ``ImproperlyConfigured`` when the option is malformed or
        does not correspond to any configured redis-server instance.
        """
        if not hasattr(self, '_master_client'):
            cache = self.options.get('MASTER_CACHE', None)
            if cache is None:
                self._master_client = None
            else:
                self._master_client = None
                try:
                    host, port = cache.split(":")
                except ValueError:
                    raise ImproperlyConfigured("MASTER_CACHE must be in the form <host>:<port>")
                # Iterate the client objects, not the identifier keys:
                # the original looped over the dict itself, which yields
                # tuples and has no ``connection_pool`` attribute.
                for client in self.clients.values():
                    connection_kwargs = client.connection_pool.connection_kwargs
                    if connection_kwargs['host'] == host and connection_kwargs['port'] == int(port):
                        self._master_client = client
                        break
                if self._master_client is None:
                    raise ImproperlyConfigured("%s is not in the list of available redis-server instances." % cache)
        return self._master_client

    def get_client(self, key, for_write=False):
        """
        Pick the client responsible for ``key``: the master when one is
        configured and this is a write, otherwise the hash-ring owner.
        """
        if for_write and self.master_client is not None:
            return self.master_client
        # smart_text replaces the py2-only unicode() builtin.
        node = self.sharder.get_node(smart_text(key))
        return self.clients[node]

    def shard(self, keys, for_write=False, version=None):
        """
        Returns a dict of keys that belong to a cache's keyspace.
        """
        clients = defaultdict(list)
        for key in keys:
            clients[self.get_client(key, for_write)].append(
                self.make_key(key, version))
        return clients

    ####################
    # Django cache api #
    ####################

    def add(self, key, value, timeout=None, version=None):
        """
        Add a value to the cache, failing if the key already exists.

        Returns ``True`` if the object was added, ``False`` if not.

        NOTE(review): unlike set/delete, this does not request the master
        client (``for_write`` defaults to False) — confirm intended.
        """
        client = self.get_client(key)
        key = self.make_key(key, version=version)
        return self._add(client, key, value, timeout)

    def get(self, key, default=None, version=None):
        """
        Retrieve a value from the cache.

        Returns unpickled value if key is found, the default if not.
        """
        client = self.get_client(key)
        key = self.make_key(key, version=version)
        return self._get(client, key, default)

    def set(self, key, value, timeout=None, version=None, client=None):
        """
        Persist a value to the cache, and set an optional expiration time.

        ``client`` may be supplied (e.g. a pipeline) to bypass routing.
        """
        if client is None:
            client = self.get_client(key, for_write=True)
        key = self.make_key(key, version=version)
        return self._set(key, value, timeout, client=client)

    def delete(self, key, version=None):
        """
        Remove a key from the cache.
        """
        client = self.get_client(key, for_write=True)
        key = self.make_key(key, version=version)
        return self._delete(client, key)

    def delete_many(self, keys, version=None):
        """
        Remove multiple keys at once.
        """
        clients = self.shard(keys, for_write=True, version=version)
        for client, keys in clients.items():
            self._delete_many(client, keys)

    def clear(self, version=None):
        """
        Flush cache keys.

        If version is specified, all keys belonging the version's key
        namespace will be deleted.  Otherwise, all keys will be deleted.
        """
        if version is None:
            if self.master_client is None:
                for client in self.clients.values():
                    self._clear(client)
            else:
                self._clear(self.master_client)
        else:
            self.delete_pattern('*', version=version)

    def get_many(self, keys, version=None):
        """Bulk get across shards; returns a dict of the keys found."""
        data = {}
        clients = self.shard(keys, version=version)
        for client, versioned_keys in clients.items():
            original_keys = [key._original_key for key in versioned_keys]
            data.update(
                self._get_many(client, original_keys, versioned_keys=versioned_keys))
        return data

    def set_many(self, data, timeout=None, version=None):
        """
        Set a bunch of values in the cache at once from a dict of key/value
        pairs.  This is much more efficient than calling set() multiple times.

        If timeout is given, that timeout will be used for the key; otherwise
        the default cache timeout will be used.
        """
        clients = self.shard(data.keys(), for_write=True, version=version)

        if timeout is None:
            for client, keys in clients.items():
                subset = {}
                for key in keys:
                    subset[key] = data[key._original_key]
                self._set_many(client, subset)
            return

        # Explicit timeout: pipeline the per-key SETs on each shard.
        for client, keys in clients.items():
            pipeline = client.pipeline()
            for key in keys:
                self._set(key, data[key._original_key], timeout, client=pipeline)
            pipeline.execute()

    def incr(self, key, delta=1, version=None):
        """
        Add delta to value in the cache.  If the key does not exist, raise a
        ValueError exception.
        """
        client = self.get_client(key, for_write=True)
        key = self.make_key(key, version=version)
        return self._incr(client, key, delta=delta)

    def incr_version(self, key, delta=1, version=None):
        """
        Adds delta to the cache version for the supplied key.  Returns the
        new version.
        """
        if version is None:
            version = self.version
        client = self.get_client(key, for_write=True)
        old = self.make_key(key, version=version)
        new = self.make_key(key, version=version + delta)
        return self._incr_version(client, old, new, delta, version)

    #####################
    # Extra api methods #
    #####################

    def has_key(self, key, version=None):
        """True if ``key`` exists (version handling delegated to base)."""
        client = self.get_client(key, for_write=False)
        return self._has_key(client, key, version)

    def ttl(self, key, version=None):
        """Seconds until ``key`` expires, per the base implementation."""
        client = self.get_client(key, for_write=False)
        key = self.make_key(key, version=version)
        return self._ttl(client, key)

    def delete_pattern(self, pattern, version=None):
        """Delete keys matching the glob ``pattern`` on every shard (or
        only the master when one is configured)."""
        pattern = self.make_key(pattern, version=version)
        if self.master_client is None:
            for client in self.clients.values():
                self._delete_pattern(client, pattern)
        else:
            self._delete_pattern(self.master_client, pattern)

    def get_or_set(self, key, func, timeout=None, version=None):
        """Return the cached value, computing and storing ``func()`` on a
        miss (see README's thundering-herd notes)."""
        client = self.get_client(key, for_write=True)
        key = self.make_key(key, version=version)
        return self._get_or_set(client, key, func, timeout)

    def reinsert_keys(self):
        """
        Reinsert cache entries using the current pickle protocol version.
        """
        for client in self.clients.values():
            self._reinsert_keys(client)
# Prefer the C implementation of pickle on Python 2; fall back to the
# plain module (already accelerated on Python 3).  A stray debug ``print``
# statement that executed at import time has been removed.
try:
    import cPickle as pickle
except ImportError:
    import pickle
from redis_cache.backends.base import BaseRedisCache
from redis_cache.compat import bytes_type, DEFAULT_TIMEOUT
class RedisCache(BaseRedisCache):
    """Cache backend backed by a single redis-server instance."""

    def __init__(self, server, params):
        """
        Connect to Redis, and set up cache backend.
        """
        super(RedisCache, self).__init__(server, params)
        if not isinstance(server, bytes_type):
            # A one-element sequence may be given; unpack the location.
            # NOTE(review): create_client() below is still handed the
            # original ``server`` object — confirm it accepts a sequence.
            self._server, = server
        self.client = self.create_client(server)
        # Identifier -> client map, mirroring the sharded backend's API.
        self.clients = {
            self.client.connection_pool.connection_identifier: self.client
        }

    def get_client(self, *args, **kwargs):
        """
        Always return the single client.  Accepts (and ignores) the
        sharded backend's ``(key, for_write=...)`` call shape so callers
        can treat both backends uniformly.
        """
        return self.client

    ####################
    # Django cache api #
    ####################

    def add(self, key, value, timeout=None, version=None):
        """
        Add a value to the cache, failing if the key already exists.

        Returns ``True`` if the object was added, ``False`` if not.
        """
        key = self.make_key(key, version=version)
        return self._add(self.client, key, value, timeout)

    def get(self, key, default=None, version=None):
        """
        Retrieve a value from the cache.

        Returns unpickled value if key is found, the default if not.
        """
        key = self.make_key(key, version=version)
        return self._get(self.client, key, default)

    def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None, client=None):
        """
        Persist a value to the cache, and set an optional expiration time.

        ``client`` may be a pipeline; it was previously accepted but
        ignored, which silently broke pipelined writes.
        """
        if client is None:
            client = self.client
        key = self.make_key(key, version=version)
        return self._set(key, value, timeout, client=client)

    def delete(self, key, version=None):
        """
        Remove a key from the cache.
        """
        key = self.make_key(key, version=version)
        return self._delete(self.client, key)

    def delete_many(self, keys, version=None):
        """
        Remove multiple keys at once.
        """
        versioned_keys = self.make_keys(keys, version=version)
        self._delete_many(self.client, versioned_keys)

    def clear(self, version=None):
        """
        Flush cache keys.

        If version is specified, all keys belonging the version's key
        namespace will be deleted.  Otherwise, all keys will be deleted.
        """
        if version is None:
            self._clear(self.client)
        else:
            self.delete_pattern('*', version=version)

    def get_many(self, keys, version=None):
        """Bulk get; returns a dict of the keys that were found."""
        versioned_keys = self.make_keys(keys, version=version)
        return self._get_many(self.client, keys, versioned_keys=versioned_keys)

    def set_many(self, data, timeout=None, version=None):
        """
        Set a bunch of values in the cache at once from a dict of key/value
        pairs.  This is much more efficient than calling set() multiple times.

        If timeout is given, that timeout will be used for the key; otherwise
        the default cache timeout will be used.
        """
        # Pass version through — it was previously dropped, so set_many
        # always wrote into the default version namespace (the sharded
        # backend forwards it correctly).
        versioned_keys = self.make_keys(data.keys(), version=version)
        if timeout is None:
            new_data = {}
            for key in versioned_keys:
                new_data[key] = data[key._original_key]
            return self._set_many(self.client, new_data)

        pipeline = self.client.pipeline()
        for key in versioned_keys:
            self._set(key, data[key._original_key], timeout, client=pipeline)
        pipeline.execute()

    def incr(self, key, delta=1, version=None):
        """
        Add delta to value in the cache.  If the key does not exist, raise a
        ValueError exception.
        """
        key = self.make_key(key, version=version)
        return self._incr(self.client, key, delta=delta)

    def incr_version(self, key, delta=1, version=None):
        """
        Adds delta to the cache version for the supplied key.  Returns the
        new version.
        """
        if version is None:
            version = self.version
        old = self.make_key(key, version=version)
        new = self.make_key(key, version=version + delta)
        return self._incr_version(self.client, old, new, delta, version)

    #####################
    # Extra api methods #
    #####################

    def has_key(self, key, version=None):
        """True if ``key`` exists (version handling delegated to base)."""
        return self._has_key(self.client, key, version)

    def ttl(self, key, version=None):
        """Seconds until ``key`` expires, or None for non-volatile keys
        (per README)."""
        key = self.make_key(key, version=version)
        return self._ttl(self.client, key)

    def delete_pattern(self, pattern, version=None):
        """Delete all keys matching the glob ``pattern``."""
        pattern = self.make_key(pattern, version=version)
        self._delete_pattern(self.client, pattern)

    def get_or_set(self, key, func, timeout=None, version=None):
        """Return the cached value, computing and storing ``func()`` on a
        miss (see README's thundering-herd notes)."""
        key = self.make_key(key, version=version)
        return self._get_or_set(self.client, key, func, timeout)

    def reinsert_keys(self):
        """
        Reinsert cache entries using the current pickle protocol version.
        """
        self._reinsert_keys(self.client)
This diff is collapsed.
from redis.connection import UnixDomainSocketConnection, Connection
class CacheConnectionPool(object):
    """Process-wide registry of redis connection pools, keyed by endpoint."""

    def __init__(self):
        self._connection_pools = {}

    def get_connection_pool(
        self,
        host='127.0.0.1',
        port=6379,
        db=1,
        password=None,
        parser_class=None,
        unix_socket_path=None,
        connection_pool_class=None,
        connection_pool_class_kwargs=None,
        **kwargs
    ):
        """
        Return (creating on first use) the pool for the given endpoint.

        Pools are cached on ``(host, port, db, unix_socket_path)`` so all
        cache instances in the process share connections to the same
        redis-server.  Extra ``**kwargs`` are accepted but intentionally
        ignored, as in the original implementation.
        """
        connection_identifier = (host, port, db, unix_socket_path)
        pool = self._connection_pools.get(connection_identifier)
        if pool is None:
            # Unix-domain transport when a socket path is configured,
            # TCP otherwise.
            connection_class = (
                unix_socket_path and UnixDomainSocketConnection or Connection
            )
            pool_kwargs = {
                'db': db,
                'password': password,
                'connection_class': connection_class,
                'parser_class': parser_class,
            }
            # Guard against None so callers may omit the kwargs dict
            # entirely (previously this raised TypeError).
            pool_kwargs.update(connection_pool_class_kwargs or {})
            if unix_socket_path is None:
                pool_kwargs.update({
                    'host': host,
                    'port': port,
                })
            else:
                pool_kwargs['path'] = unix_socket_path

            pool = connection_pool_class(**pool_kwargs)
            # Stash the identifier so backends can key their clients by it.
            pool.connection_identifier = connection_identifier
            self._connection_pools[connection_identifier] = pool
        return self._connection_pools[connection_identifier]


pool = CacheConnectionPool()
from bisect import insort, bisect
from hashlib import md5
from math import log
import sys
#from django.utils.encoding import smart_str
# Number of leading hex digits of an MD5 digest that fit in a native-sized
# int.  sys.maxsize replaces the Python-2-only sys.maxint (equal on CPython 2.6+),
# keeping this module importable on Python 3 as the package claims to support.
DIGITS = int(log(sys.maxsize) / log(16))


def make_hash(s):
    """Return a deterministic integer hash of *s* built from its MD5 digest."""
    return int(md5(s.encode('utf-8')).hexdigest()[:DIGITS], 16)


class Node(object):
    """A virtual node on the hash ring: a backend identifier plus a replica
    index, positioned by ``make_hash``."""

    def __init__(self, node, i):
        self._node = node
        self._position = make_hash("%d:%s" % (i, str(self._node)))

    def _position_of(self, other):
        # Allow ordering against raw hash integers (used by bisect) as
        # well as other Nodes, matching the old __cmp__ contract.
        if isinstance(other, int):
            return other
        elif isinstance(other, Node):
            return other._position
        raise TypeError('Cannot compare this class with "%s" type' % type(other))

    # Rich comparisons replace the Python-2-only __cmp__/cmp pair so that
    # bisect/insort work on Python 3.
    def __lt__(self, other):
        return self._position < self._position_of(other)

    def __gt__(self, other):
        return self._position > self._position_of(other)

    def __le__(self, other):
        return self._position <= self._position_of(other)

    def __ge__(self, other):
        return self._position >= self._position_of(other)

    def __eq__(self, other):
        # Equality is by backend identity (not ring position), as before.
        return self._node == other._node


class HashRing(object):
    """Consistent-hash ring with ``replicas`` virtual nodes per entry."""

    def __init__(self, replicas=16):
        self.replicas = replicas
        self._nodes = []

    def _add(self, node, i):
        insort(self._nodes, Node(node, i))

    def add(self, node, weight=1):
        # xrange -> range for Python 3 compatibility.
        for i in range(weight * self.replicas):
            self._add(node, i)

    def remove(self, node):
        # Walk backwards so deletions never shift indices still to visit.
        n = len(self._nodes)
        for i, _node in enumerate(reversed(self._nodes)):
            if node == _node._node:
                del self._nodes[n - i - 1]

    def get_node(self, key):
        # -1 wraps to the last node when the hash precedes every position.
        i = bisect(self._nodes, make_hash(key)) - 1
        return self._nodes[i]._node
from redis_cache.compat import smart_text, python_2_unicode_compatible
@python_2_unicode_compatible
class CacheKey(object):
    """
    A stub string class that we can use to check if a key was created already.

    Instances compare — and now hash — like their versioned string, so
    they remain usable as dict keys (e.g. in set_many) on Python 3, where
    defining __eq__ without __hash__ makes a class unhashable.
    """
    def __init__(self, key, versioned_key):
        self._original_key = key            # key as supplied by the caller
        self._versioned_key = versioned_key  # key with version applied

    def __eq__(self, other):
        return self._versioned_key == other

    def __hash__(self):
        # Hash like the versioned string so CacheKey and the equal plain
        # string land in the same hash bucket.
        return hash(self._versioned_key)

    def __unicode__(self):
        return smart_text(self._versioned_key)

    __repr__ = __str__ = __unicode__
hiredis==0.2.0
django-nose==1.4
nose==1.3.6
from setuptools import setup
setup(
name = "django-redis-cache",
url = "http://github.com/sebleier/django-redis-cache/",
author = "Sean Bleier",
author_email = "sebleier@gmail.com",
version = "0.13.1",
packages = ["redis_cache"],
description = "Redis Cache Backend for Django",
install_requires=['redis>=2.4.5',],
classifiers = [
name="django-redis-cache",
url="http://github.com/sebleier/django-redis-cache/",
author="Sean Bleier",
author_email="sebleier@gmail.com",
version="1.0.0a",
packages=["redis_cache"],
description="Redis Cache Backend for Django",
install_requires=['redis>=2.4.5'],
classifiers=[
"Programming Language :: Python",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Operating System :: OS Independent",
"Topic :: Software Development :: Libraries",
"Topic :: Utilities",
......
#!/usr/bin/env python
import sys
from os.path import dirname, abspath
from django.conf import settings
cache_settings = {
'DATABASES': {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
'INSTALLED_APPS': [
'tests.testapp',
],
'ROOT_URLCONF': 'tests.urls',
'CACHES': {
'default': {
'BACKEND': 'redis_cache.RedisCache',
'LOCATION': '/tmp/redis.sock',
'OPTIONS': {
'DB': 15,
'PASSWORD': 'yadayada',
'PARSER_CLASS': 'redis.connection.HiredisParser'
},
},
},
}
if not settings.configured:
settings.configure(**cache_settings)
from django.test.simple import DjangoTestSuiteRunner
def runtests(*test_args):
    """Run the project test suite and exit with the failure count."""
    labels = test_args if test_args else ['testapp']
    # Make the tests package importable regardless of invocation cwd.
    sys.path.insert(0, dirname(abspath(__file__)))
    suite_runner = DjangoTestSuiteRunner(
        verbosity=1, interactive=True, failfast=False)
    sys.exit(suite_runner.run_tests(labels))
if __name__ == '__main__':
runtests(*sys.argv[1:])
#!/usr/bin/env python
import sys
from os.path import dirname, abspath, join
import django
from django.conf import settings
cache_settings = {
'DATABASES': {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
'MIDDLEWARE_CLASSES':(),
'INSTALLED_APPS': [
'testapp',
],
'ROOT_URLCONF': 'tests.urls',
'CACHES': {
'default': {
'BACKEND': 'redis_cache.RedisCache',
'LOCATION': '127.0.0.1:6379',
'OPTIONS': {
'DB': 15,
'PARSER_CLASS': 'redis.connection.HiredisParser',
'CONNECTION_POOL_CLASS': 'redis.ConnectionPool',
'CONNECTION_POOL_CLASS_KWARGS': {
'max_connections': 2
}
},
},
},
}
if not settings.configured:
settings.configure(**cache_settings)
try:
from django.test.simple import DjangoTestSuiteRunner as TestSuiteRunner
except ImportError:
from django.test.runner import DiscoverRunner as TestSuiteRunner
def runtests(*test_args):
    """Run the suite under whichever test runner this Django provides."""
    labels = test_args if test_args else ['testapp']
    # Make the bundled tests package importable.
    sys.path.insert(0, join(dirname(abspath(__file__)), 'tests'))
    try:
        # Django >= 1.7 requires explicit app registry setup.
        django.setup()
    except AttributeError:
        # Older Django has no setup(); nothing to do.
        pass
    suite_runner = TestSuiteRunner(verbosity=1, interactive=True, failfast=False)
    sys.exit(suite_runner.run_tests(labels))
if __name__ == '__main__':
runtests(*sys.argv[1:])
"""
A quick and dirty benchmarking script. GitPython is an optional dependency
which you can use to change branches via the command line.
Usage::
python benchmark.py
python benchmark.py master
python benchmark.py some-branch
"""
import os
import sys
from time import time
from django.core import cache
from hashlib import sha1 as sha
try:
from git import Repo
except ImportError:
pass
else:
if len(sys.argv) > 1:
repo_path = os.path.dirname(__file__)
repo = Repo(repo_path)
repo.branches[sys.argv[1]].checkout()
print "Testing %s" % repo.active_branch
def h(value):
    """SHA-1 hex digest of value's string form.

    Encodes to UTF-8 first: sha1() requires bytes on Python 3, and the
    values hashed here (ints and hex digests) are ASCII-safe on Python 2.
    """
    return sha(str(value).encode('utf-8')).hexdigest()
class BenchmarkRegistry(type):
    # Metaclass that auto-registers every Benchmark subclass so
    # Benchmark.run_benchmarks() can discover them.
    def __init__(cls, name, bases, attrs):
        if not hasattr(cls, 'benchmarks'):
            # First class created (the Benchmark base itself): create the
            # shared registry list on it.
            cls.benchmarks = []
        else:
            # Every later subclass appends itself to the registry.
            cls.benchmarks.append(cls)
class Benchmark(object):
    # Base class for timed benchmarks.  NOTE: Python 2-style metaclass
    # hook; under Python 3 this attribute is ignored (registration would
    # need the ``metaclass=`` class keyword).
    __metaclass__ = BenchmarkRegistry

    def setUp(self):
        # Hook: prepare fixtures before timing starts.
        pass

    def tearDown(self):
        # Hook: clean up after timing stops.
        pass

    def timetrial(self):
        # Time a single run() call, excluding setUp/tearDown cost.
        self.setUp()
        start = time()
        self.run()
        t = time() - start
        self.tearDown()
        return t

    def run(self):
        # Override with the workload to be measured.
        pass

    @classmethod
    def run_benchmarks(cls):
        # Instantiate and time every registered benchmark, printing its
        # docstring and elapsed seconds (Python 2 print statements).
        for benchmark in cls.benchmarks:
            benchmark = benchmark()
            print benchmark.__doc__
            print "Time: %s" % (benchmark.timetrial())
class GetAndSetBenchmark(Benchmark):
    "Setting and Getting Mixed"

    def setUp(self):
        # Build 60k entries: sha1-hex keys mapping to ints, plus
        # double-hashed keys mapping to hex strings.
        self.cache = cache.get_cache('default')
        self.values = {}
        for i in range(30000):
            self.values[h(i)] = i
            self.values[h(h(i))] = h(i)

    def run(self):
        # Write then read back every entry individually.
        for k, v in self.values.items():
            self.cache.set(k, v)
        for k, v in self.values.items():
            value = self.cache.get(k)
class IncrBenchmark(Benchmark):
    "Incrementing integers"

    def setUp(self):
        self.cache = cache.get_cache('default')
        self.values = {}
        self.ints = []
        # self.strings is populated but unused by run(); kept for parity
        # with the other benchmarks' fixtures.
        self.strings = []
        for i in range(30000):
            self.values[h(i)] = i
            self.values[h(h(i))] = h(i)
            self.ints.append(i)
            self.strings.append(h(i))

    def run(self):
        # NOTE(review): incr() targets keys that were never set() here —
        # confirm the backend's incr semantics for missing keys before
        # trusting these numbers.
        for i in self.ints:
            self.cache.incr(h(i), 100)
class MsetAndMGet(Benchmark):
    "Getting and setting many mixed values"

    def setUp(self):
        # Same 60k mixed fixture as GetAndSetBenchmark.
        self.cache = cache.get_cache('default')
        self.values = {}
        for i in range(30000):
            self.values[h(i)] = i
            self.values[h(h(i))] = h(i)

    def run(self):
        # One bulk write followed by one bulk read of all keys.
        self.cache.set_many(self.values)
        value = self.cache.get_many(self.values.keys())
\ No newline at end of file
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "tests.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
......@@ -7,21 +7,28 @@ DATABASES = {
}
INSTALLED_APPS = [
'django_nose',
'tests.testapp',
]
ROOT_URLCONF = 'tests.urls'
SECRET_KEY = "shh...it's a seakret"
CACHES = {
'default': {
'BACKEND': 'redis_cache.RedisCache',
'LOCATION': '127.0.0.1:6379',
'OPTIONS': { # optional
'LOCATION': '127.0.0.1:6381',
'OPTIONS': {
'DB': 15,
'PASSWORD': 'yadayada',
'MAX_CONNECTIONS': 2,
'PARSER_CLASS': 'redis.connection.HiredisParser',
'PICKLE_VERSION': 2,
'CONNECTION_POOL_CLASS': 'redis.ConnectionPool',
'CONNECTION_POOL_CLASS_KWARGS': {
'max_connections': 2,
}
},
},
}
ROOT_URLCONF = 'tests.urls'
SECRET_KEY = 'blabla'
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
from math import sqrt
def mean(lst):
    """Arithmetic mean of *lst* (integer division under Python 2)."""
    return sum(lst) / len(lst)


def stddev(lst):
    """Return the population standard deviation of *lst*.

    The previous version forgot to divide the summed squared deviations
    by ``len(lst)``, so it returned sqrt of the *sum*, not the standard
    deviation its docstring promised.
    """
    avg = mean(lst)
    variance = sum((i - avg) ** 2 for i in lst) / len(lst)
    return sqrt(variance)
class MultiServerTests(object):
    """
    Mixin of assertions that only apply to sharded (multi-server) caches.

    The host TestCase must provide ``self.cache`` and the unittest assert
    API.  ``xrange``/``itervalues`` were replaced with their Python 3
    equivalents (``range`` works on Python 2 as well).
    """

    def test_key_distribution(self):
        # A healthy hash ring spreads keys evenly: require the relative
        # standard deviation of per-server key counts to stay under 10%.
        n = 10000
        for i in range(n):
            self.cache.set(i, i)
        keys = [len(client.keys('*')) for client in self.cache.clients.values()]
        self.assertTrue(((stddev(keys) / n) * 100.0) < 10)

    def test_removing_nodes(self):
        # Removing a server should drop exactly its replica count of
        # virtual nodes from the ring.
        c1, c2, c3 = self.cache.clients.keys()
        replicas = self.cache.sharder.replicas
        self.assertEqual(len(self.cache.sharder._nodes), 3 * replicas)
        self.cache.sharder.remove(c1)
        self.assertEqual(len(self.cache.sharder._nodes), 2 * replicas)
        self.cache.sharder.remove(c2)
        self.assertEqual(len(self.cache.sharder._nodes), 1 * replicas)
        self.cache.sharder.remove(c3)
        self.assertEqual(len(self.cache.sharder._nodes), 0)
# # -*- coding: utf-8 -*-
from tests.testapp.tests.base_tests import BaseRedisTestCase
from tests.testapp.tests.multi_server_tests import MultiServerTests
try:
from django.test import override_settings
except ImportError:
from django.test.utils import override_settings
from django.test import TestCase
from redis_cache.cache import ImproperlyConfigured
from redis.connection import UnixDomainSocketConnection
LOCATION = "unix://:yadayada@/tmp/redis4.sock?db=15"
LOCATIONS = [
"unix://:yadayada@/tmp/redis4.sock?db=15",
"unix://:yadayada@/tmp/redis5.sock?db=15",
"unix://:yadayada@/tmp/redis6.sock?db=15",
]
class SocketTestCase(BaseRedisTestCase, TestCase):
pass
@override_settings(
CACHES={
'default': {
'BACKEND': 'redis_cache.RedisCache',
'LOCATION': LOCATION,
'OPTIONS': {
'DB': 15,
'PASSWORD': 'yadayada',
'PARSER_CLASS': 'redis.connection.HiredisParser',
'PICKLE_VERSION': 2,
'CONNECTION_POOL_CLASS': 'redis.ConnectionPool',
'CONNECTION_POOL_CLASS_KWARGS': {
'max_connections': 2,
}
},
},
}
)
class SingleHiredisTestCase(SocketTestCase):
pass
@override_settings(
CACHES={
'default': {
'BACKEND': 'redis_cache.RedisCache',
'LOCATION': LOCATION,
'OPTIONS': {
'DB': 15,
'PASSWORD': 'yadayada',
'PARSER_CLASS': 'redis.connection.PythonParser',
'PICKLE_VERSION': 2,
'CONNECTION_POOL_CLASS': 'redis.ConnectionPool',
'CONNECTION_POOL_CLASS_KWARGS': {
'max_connections': 2,
}
},
},
}
)
class SinglePythonParserTestCase(SocketTestCase):
pass
@override_settings(
CACHES={
'default': {
'BACKEND': 'redis_cache.ShardedRedisCache',
'LOCATION': LOCATIONS,
'OPTIONS': {
'DB': 15,
'PASSWORD': 'yadayada',
'PARSER_CLASS': 'redis.connection.HiredisParser',
'PICKLE_VERSION': 2,
'CONNECTION_POOL_CLASS': 'redis.ConnectionPool',
'CONNECTION_POOL_CLASS_KWARGS': {
'max_connections': 2,
}
},
},
}
)
class MultipleHiredisTestCase(MultiServerTests, SocketTestCase):
pass
@override_settings(
CACHES={
'default': {
'BACKEND': 'redis_cache.ShardedRedisCache',
'LOCATION': LOCATIONS,
'OPTIONS': {
'DB': 15,
'PASSWORD': 'yadayada',
'PARSER_CLASS': 'redis.connection.PythonParser',
'PICKLE_VERSION': 2,
'CONNECTION_POOL_CLASS': 'redis.ConnectionPool',
'CONNECTION_POOL_CLASS_KWARGS': {
'max_connections': 2,
}
},
},
}
)
class MultiplePythonParserTestCase(MultiServerTests, SocketTestCase):
pass
# -*- coding: utf-8 -*-
from tests.testapp.tests.base_tests import BaseRedisTestCase
from tests.testapp.tests.multi_server_tests import MultiServerTests
try:
from django.test import override_settings
except ImportError:
from django.test.utils import override_settings
from django.test import TestCase
from redis_cache.cache import ImproperlyConfigured
from redis.connection import UnixDomainSocketConnection
LOCATION = "127.0.0.1:6381"
LOCATIONS = [
'127.0.0.1:6381',
'127.0.0.1:6382',
'127.0.0.1:6383',
]
class TCPTestCase(BaseRedisTestCase, TestCase):
    # Base case for TCP-transport tests; subclasses override CACHES via
    # @override_settings to vary parser/backend combinations.
    def test_default_initialization(self):
        # Rebuild the cache from current settings and verify the client
        # was keyed and configured from the expected (host, port, db).
        self.reset_pool()
        self.cache = self.get_cache()
        client = self.cache.clients[('127.0.0.1', 6381, 15, None)]
        connection_class = client.connection_pool.connection_class
        if connection_class is not UnixDomainSocketConnection:
            self.assertEqual(client.connection_pool.connection_kwargs['host'], '127.0.0.1')
            self.assertEqual(client.connection_pool.connection_kwargs['port'], 6381)
            # Flag used by the harness — presumably skips redis cleanup
            # for TCP runs; TODO confirm in BaseRedisTestCase.tearDown.
            self._skip_tearDown = True
        self.assertEqual(client.connection_pool.connection_kwargs['db'], 15)
@override_settings(
CACHES={
'default': {
'BACKEND': 'redis_cache.RedisCache',
'LOCATION': LOCATION,
'OPTIONS': {
'DB': 15,
'PASSWORD': 'yadayada',
'PARSER_CLASS': 'redis.connection.HiredisParser',
'PICKLE_VERSION': 2,
'CONNECTION_POOL_CLASS': 'redis.ConnectionPool',
'CONNECTION_POOL_CLASS_KWARGS': {
'max_connections': 2,
}
},
},
}
)
class SingleHiredisTestCase(TCPTestCase):
pass
@override_settings(
CACHES={
'default': {
'BACKEND': 'redis_cache.RedisCache',
'LOCATION': LOCATION,
'OPTIONS': {
'DB': 15,
'PASSWORD': 'yadayada',
'PARSER_CLASS': 'redis.connection.PythonParser',
'PICKLE_VERSION': 2,
'CONNECTION_POOL_CLASS': 'redis.ConnectionPool',
'CONNECTION_POOL_CLASS_KWARGS': {
'max_connections': 2,
}
},
},
}
)
class SinglePythonParserTestCase(TCPTestCase):
pass
@override_settings(
CACHES={
'default': {
'BACKEND': 'redis_cache.ShardedRedisCache',
'LOCATION': LOCATIONS,
'OPTIONS': {
'DB': 15,
'PASSWORD': 'yadayada',
'PARSER_CLASS': 'redis.connection.HiredisParser',
'PICKLE_VERSION': 2,
'CONNECTION_POOL_CLASS': 'redis.ConnectionPool',
'CONNECTION_POOL_CLASS_KWARGS': {
'max_connections': 2,
}
},
},
}
)
class MultipleHiredisTestCase(MultiServerTests, TCPTestCase):
pass
@override_settings(
CACHES={
'default': {
'BACKEND': 'redis_cache.ShardedRedisCache',
'LOCATION': LOCATIONS,
'OPTIONS': {
'DB': 15,
'PASSWORD': 'yadayada',
'PARSER_CLASS': 'redis.connection.PythonParser',
'PICKLE_VERSION': 2,
'CONNECTION_POOL_CLASS': 'redis.ConnectionPool',
'CONNECTION_POOL_CLASS_KWARGS': {
'max_connections': 2,
}
},
},
}
)
class MultiplePythonParserTestCase(MultiServerTests, TCPTestCase):
pass
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment