diff --git a/README.rst b/README.rst
index 4d55092d..12fa5125 100644
--- a/README.rst
+++ b/README.rst
@@ -184,6 +184,19 @@ Requirements
         },
     }
 
+    # For Cluster Setup
+    # redis-server instance.
+    CACHES = {
+        'default': {
+            'BACKEND': 'redis_cache.RedisClusterCache',
+            'LOCATION': [
+                ':',
+                ':',
+                ':',
+            ]
+        },
+    }
+
 
 Usage
 
diff --git a/redis_cache/__init__.py b/redis_cache/__init__.py
index fb73f2b6..2190148b 100644
--- a/redis_cache/__init__.py
+++ b/redis_cache/__init__.py
@@ -1,3 +1,4 @@
 from redis_cache.backends.single import RedisCache
 from redis_cache.backends.multiple import ShardedRedisCache
 from redis_cache.backends.dummy import RedisDummyCache
+from redis_cache.backends.cluster import RedisClusterCache
diff --git a/redis_cache/backends/cluster.py b/redis_cache/backends/cluster.py
new file mode 100644
index 00000000..6427166f
--- /dev/null
+++ b/redis_cache/backends/cluster.py
@@ -0,0 +1,115 @@
+from rediscluster import RedisCluster
+
+from redis_cache.compat import DEFAULT_TIMEOUT
+from redis_cache.utils import parse_connection_kwargs
+
+try:
+    import cPickle as pickle
+except ImportError:
+    import pickle
+import random
+
+from redis_cache.backends.base import BaseRedisCache
+
+
+class RedisClusterCache(BaseRedisCache):
+    def __init__(self, server, params):
+        """
+        Connect to Redis, and set up cache backend.
+        """
+        super(RedisClusterCache, self).__init__(server, params)
+
+        conn_params = {
+            'startup_nodes': []
+        }
+
+        for server in self.servers:
+            server_params = parse_connection_kwargs(server)
+            conn_params['startup_nodes'].append(server_params)
+
+        client = RedisCluster(**conn_params)
+        self.clients['cluster'] = client
+
+        self.client_list = self.clients.values()
+        self.master_client = self.get_master_client()
+
+    def get_client(self, key, write=False):
+        if write and self.master_client is not None:
+            return self.master_client
+        return random.choice(list(self.client_list))
+
+    ####################
+    # Django cache api #
+    ####################
+
+    def delete_many(self, keys, version=None):
+        """Remove multiple keys at once."""
+        versioned_keys = self.make_keys(keys, version=version)
+        if versioned_keys:
+            self._delete_many(self.master_client, versioned_keys)
+
+    def clear(self, version=None):
+        """Flush cache keys.
+
+        If version is specified, all keys belonging the version's key
+        namespace will be deleted. Otherwise, all keys will be deleted.
+        """
+        if version is None:
+            self._clear(self.master_client)
+        else:
+            self.delete_pattern('*', version=version)
+
+    def get_many(self, keys, version=None):
+        versioned_keys = self.make_keys(keys, version=version)
+        return self._get_many(self.master_client, keys, versioned_keys=versioned_keys)
+
+    def set_many(self, data, timeout=DEFAULT_TIMEOUT, version=None):
+        """
+        Set a bunch of values in the cache at once from a dict of key/value
+        pairs. This is much more efficient than calling set() multiple times.
+
+        If timeout is given, that timeout will be used for the key; otherwise
+        the default cache timeout will be used.
+        """
+        timeout = self.get_timeout(timeout)
+
+        versioned_keys = self.make_keys(data.keys(), version=version)
+        if timeout is None:
+            new_data = {}
+            for key in versioned_keys:
+                new_data[key] = self.prep_value(data[key._original_key])
+            return self._set_many(self.master_client, new_data)
+
+        pipeline = self.master_client.pipeline()
+        for key in versioned_keys:
+            value = self.prep_value(data[key._original_key])
+            self._set(pipeline, key, value, timeout)
+        pipeline.execute()
+
+    def incr_version(self, key, delta=1, version=None):
+        """
+        Adds delta to the cache version for the supplied key. Returns the
+        new version.
+
+        """
+        if version is None:
+            version = self.version
+
+        old = self.make_key(key, version)
+        new = self.make_key(key, version=version + delta)
+
+        return self._incr_version(self.master_client, old, new, delta, version)
+
+    #####################
+    # Extra api methods #
+    #####################
+
+    def delete_pattern(self, pattern, version=None):
+        pattern = self.make_key(pattern, version=version)
+        self._delete_pattern(self.master_client, pattern)
+
+    def reinsert_keys(self):
+        """
+        Reinsert cache entries using the current pickle protocol version.
+        """
+        self._reinsert_keys(self.master_client)
diff --git a/requirements-dev.txt b/requirements-dev.txt
index 38159a0d..ebd4c236 100644
--- a/requirements-dev.txt
+++ b/requirements-dev.txt
@@ -4,3 +4,4 @@ nose==1.3.6
 unittest2==1.0.1
 msgpack-python==0.4.6
 pyyaml==3.11
+sqlparse>=0.5.0 # not directly required, pinned by Snyk to avoid a vulnerability
diff --git a/requirements.txt b/requirements.txt
index 009cbe7f..0b5e50b7 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1 +1,2 @@
 redis>=2.10.3
+redis-py-cluster>=1.1.0
diff --git a/setup.py b/setup.py
index 9c1f21ea..f2f7a1e4 100644
--- a/setup.py
+++ b/setup.py
@@ -8,7 +8,7 @@
     version="1.6.5",
     packages=["redis_cache", "redis_cache.backends"],
     description="Redis Cache Backend for Django",
-    install_requires=['redis>=2.10.3'],
+    install_requires=['redis>=2.10.3', 'redis-py-cluster>=1.1.0'],
     classifiers=[
         "Programming Language :: Python",
         "Programming Language :: Python :: 2.6",