[Python-modules-commits] [django-redis] 01/03: import django-redis_4.6.0.orig.tar.gz
Scott Kitterman
kitterman at moszumanska.debian.org
Fri Nov 11 13:59:34 UTC 2016
This is an automated email from the git hooks/post-receive script.
kitterman pushed a commit to branch master
in repository django-redis.
commit c18286d12552276fc0cdbaffeedff7f466c932f7
Author: Scott Kitterman <scott at kitterman.com>
Date: Fri Nov 11 08:06:25 2016 -0500
import django-redis_4.6.0.orig.tar.gz
---
AUTHORS.rst | 14 +
LICENSE | 26 +
MANIFEST.in | 5 +
PKG-INFO | 27 +
README.rst | 26 +
django_redis.egg-info/PKG-INFO | 27 +
django_redis.egg-info/SOURCES.txt | 61 ++
django_redis.egg-info/dependency_links.txt | 1 +
django_redis.egg-info/not-zip-safe | 1 +
django_redis.egg-info/requires.txt | 1 +
django_redis.egg-info/top_level.txt | 1 +
django_redis/__init__.py | 22 +
django_redis/cache.py | 151 +++
django_redis/client/__init__.py | 8 +
django_redis/client/default.py | 510 +++++++++++
django_redis/client/herd.py | 154 ++++
django_redis/client/sharded.py | 262 ++++++
django_redis/compressors/__init__.py | 0
django_redis/compressors/base.py | 11 +
django_redis/compressors/identity.py | 10 +
django_redis/compressors/lzma.py | 24 +
django_redis/compressors/zlib.py | 24 +
django_redis/exceptions.py | 23 +
django_redis/hash_ring.py | 61 ++
django_redis/pool.py | 129 +++
django_redis/serializers/__init__.py | 1 +
django_redis/serializers/base.py | 13 +
django_redis/serializers/json.py | 18 +
django_redis/serializers/msgpack.py | 15 +
django_redis/serializers/pickle.py | 36 +
django_redis/util.py | 67 ++
doc/Makefile | 9 +
doc/content-docinfo.html | 2 +
doc/content.adoc | 820 +++++++++++++++++
doc/dist/latest/index.html | 1090 ++++++++++++++++++++++
doc/index.html | 1366 ++++++++++++++++++++++++++++
setup.cfg | 5 +
setup.py | 50 +
tests/README.txt | 22 +
tests/hashring_test/__init__.py | 1 +
tests/hashring_test/models.py | 3 +
tests/hashring_test/tests.py | 39 +
tests/redis_backend_testapp/__init__.py | 0
tests/redis_backend_testapp/models.py | 0
tests/redis_backend_testapp/tests.py | 904 ++++++++++++++++++
tests/runtests-fakeredis.py | 16 +
tests/runtests-herd.py | 16 +
tests/runtests-json.py | 15 +
tests/runtests-msgpack.py | 15 +
tests/runtests-sharded.py | 15 +
tests/runtests-unixsockets.py | 13 +
tests/runtests-zlib.py | 16 +
tests/runtests.py | 16 +
tests/shell.py | 12 +
tests/test_fakeredis.py | 60 ++
tests/test_sqlite.py | 54 ++
tests/test_sqlite_herd.py | 69 ++
tests/test_sqlite_json.py | 58 ++
tests/test_sqlite_msgpack.py | 58 ++
tests/test_sqlite_sharding.py | 74 ++
tests/test_sqlite_usock.py | 69 ++
tests/test_sqlite_zlib.py | 58 ++
tests/urls.py | 3 +
63 files changed, 6677 insertions(+)
diff --git a/AUTHORS.rst b/AUTHORS.rst
new file mode 100644
index 0000000..07f7a0d
--- /dev/null
+++ b/AUTHORS.rst
@@ -0,0 +1,14 @@
+Andrei Antoukh / niwibe <https://github.com/niwibe>
+Sean Bleier <http://github.com/sebleier>
+Matt Dennewitz <http://github.com/blackbrrr>
+Jannis Leidel <http://github.com/jezdez>
+S. Angel / Twidi <http://github.com/twidi>
+Noah Kantrowitz / coderanger <http://github.com/coderanger>
+Martin Mahner / bartTC <http://github.com/bartTC>
+Timothée Peignier / cyberdelia <https://github.com/cyberdelia>
+Lior Sion / liorsion <https://github.com/liorsion>
+Ales Zoulek / aleszoulek <https://github.com/aleszoulek>
+James Aylett / jaylett <https://github.com/jaylett>
+Todd Boland / boland <https://github.com/boland>
+David Zderic / dzderic <https://github.com/dzderic>
+Kirill Zaitsev / teferi <https://github.com/teferi>
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..2cd3894
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,26 @@
+Copyright (c) 2011-2016 Andrey Antukh <niwi at niwi.nz>
+Copyright (c) 2011 Sean Bleier
+
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions
+are met:
+1. Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+2. Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+3. The name of the author may not be used to endorse or promote products
+ derived from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
+IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
+IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
+INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/MANIFEST.in b/MANIFEST.in
new file mode 100644
index 0000000..4f52ab6
--- /dev/null
+++ b/MANIFEST.in
@@ -0,0 +1,5 @@
+include LICENSE
+include AUTHORS.rst
+include README.rst
+recursive-include tests README.txt *.py
+recursive-include doc Makefile *.adoc *.html
diff --git a/PKG-INFO b/PKG-INFO
new file mode 100644
index 0000000..63c7086
--- /dev/null
+++ b/PKG-INFO
@@ -0,0 +1,27 @@
+Metadata-Version: 1.1
+Name: django-redis
+Version: 4.6.0
+Summary: Full featured redis cache backend for Django.
+Home-page: https://github.com/niwibe/django-redis
+Author: Andrei Antoukh
+Author-email: niwi at niwi.nz
+License: UNKNOWN
+Description: UNKNOWN
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Web Environment
+Classifier: Framework :: Django :: 1.8
+Classifier: Framework :: Django :: 1.9
+Classifier: Framework :: Django :: 1.10
+Classifier: Framework :: Django
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Topic :: Software Development :: Libraries
+Classifier: Topic :: Utilities
diff --git a/README.rst b/README.rst
new file mode 100644
index 0000000..b91a619
--- /dev/null
+++ b/README.rst
@@ -0,0 +1,26 @@
+==============================
+Redis cache backend for Django
+==============================
+
+Full featured redis cache backend for Django.
+
+.. image:: https://img.shields.io/travis/niwinz/django-redis.svg?style=flat
+ :target: https://travis-ci.org/niwinz/django-redis
+
+.. image:: https://img.shields.io/pypi/v/django-redis.svg?style=flat
+ :target: https://pypi.python.org/pypi/django-redis
+
+
+Documentation
+-------------
+
+http://niwinz.github.io/django-redis/latest/
+
+
+How to install
+--------------
+
+Run ``python setup.py install`` to install,
+or place ``django_redis`` on your Python path.
+
+You can also install it with: ``pip install django-redis``
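The README above defers configuration to the linked documentation; for orientation, here is a minimal sketch of how this backend is typically wired into a Django project through the CACHES setting (the redis URL, database number, and client class shown are illustrative defaults, not part of this import):

    # settings.py -- minimal django-redis cache configuration (illustrative values)
    CACHES = {
        "default": {
            "BACKEND": "django_redis.cache.RedisCache",
            "LOCATION": "redis://127.0.0.1:6379/1",
            "OPTIONS": {
                "CLIENT_CLASS": "django_redis.client.DefaultClient",
            },
        }
    }
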
diff --git a/django_redis.egg-info/PKG-INFO b/django_redis.egg-info/PKG-INFO
new file mode 100644
index 0000000..63c7086
--- /dev/null
+++ b/django_redis.egg-info/PKG-INFO
@@ -0,0 +1,27 @@
+Metadata-Version: 1.1
+Name: django-redis
+Version: 4.6.0
+Summary: Full featured redis cache backend for Django.
+Home-page: https://github.com/niwibe/django-redis
+Author: Andrei Antoukh
+Author-email: niwi at niwi.nz
+License: UNKNOWN
+Description: UNKNOWN
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Web Environment
+Classifier: Framework :: Django :: 1.8
+Classifier: Framework :: Django :: 1.9
+Classifier: Framework :: Django :: 1.10
+Classifier: Framework :: Django
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Topic :: Software Development :: Libraries
+Classifier: Topic :: Utilities
diff --git a/django_redis.egg-info/SOURCES.txt b/django_redis.egg-info/SOURCES.txt
new file mode 100644
index 0000000..33933f0
--- /dev/null
+++ b/django_redis.egg-info/SOURCES.txt
@@ -0,0 +1,61 @@
+AUTHORS.rst
+LICENSE
+MANIFEST.in
+README.rst
+setup.py
+django_redis/__init__.py
+django_redis/cache.py
+django_redis/exceptions.py
+django_redis/hash_ring.py
+django_redis/pool.py
+django_redis/util.py
+django_redis.egg-info/PKG-INFO
+django_redis.egg-info/SOURCES.txt
+django_redis.egg-info/dependency_links.txt
+django_redis.egg-info/not-zip-safe
+django_redis.egg-info/requires.txt
+django_redis.egg-info/top_level.txt
+django_redis/client/__init__.py
+django_redis/client/default.py
+django_redis/client/herd.py
+django_redis/client/sharded.py
+django_redis/compressors/__init__.py
+django_redis/compressors/base.py
+django_redis/compressors/identity.py
+django_redis/compressors/lzma.py
+django_redis/compressors/zlib.py
+django_redis/serializers/__init__.py
+django_redis/serializers/base.py
+django_redis/serializers/json.py
+django_redis/serializers/msgpack.py
+django_redis/serializers/pickle.py
+doc/Makefile
+doc/content-docinfo.html
+doc/content.adoc
+doc/index.html
+doc/dist/latest/index.html
+tests/README.txt
+tests/runtests-fakeredis.py
+tests/runtests-herd.py
+tests/runtests-json.py
+tests/runtests-msgpack.py
+tests/runtests-sharded.py
+tests/runtests-unixsockets.py
+tests/runtests-zlib.py
+tests/runtests.py
+tests/shell.py
+tests/test_fakeredis.py
+tests/test_sqlite.py
+tests/test_sqlite_herd.py
+tests/test_sqlite_json.py
+tests/test_sqlite_msgpack.py
+tests/test_sqlite_sharding.py
+tests/test_sqlite_usock.py
+tests/test_sqlite_zlib.py
+tests/urls.py
+tests/hashring_test/__init__.py
+tests/hashring_test/models.py
+tests/hashring_test/tests.py
+tests/redis_backend_testapp/__init__.py
+tests/redis_backend_testapp/models.py
+tests/redis_backend_testapp/tests.py
\ No newline at end of file
diff --git a/django_redis.egg-info/dependency_links.txt b/django_redis.egg-info/dependency_links.txt
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/django_redis.egg-info/dependency_links.txt
@@ -0,0 +1 @@
+
diff --git a/django_redis.egg-info/not-zip-safe b/django_redis.egg-info/not-zip-safe
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/django_redis.egg-info/not-zip-safe
@@ -0,0 +1 @@
+
diff --git a/django_redis.egg-info/requires.txt b/django_redis.egg-info/requires.txt
new file mode 100644
index 0000000..a80376c
--- /dev/null
+++ b/django_redis.egg-info/requires.txt
@@ -0,0 +1 @@
+redis>=2.10.0
diff --git a/django_redis.egg-info/top_level.txt b/django_redis.egg-info/top_level.txt
new file mode 100644
index 0000000..524e6f0
--- /dev/null
+++ b/django_redis.egg-info/top_level.txt
@@ -0,0 +1 @@
+django_redis
diff --git a/django_redis/__init__.py b/django_redis/__init__.py
new file mode 100644
index 0000000..6a44444
--- /dev/null
+++ b/django_redis/__init__.py
@@ -0,0 +1,22 @@
+# -*- coding: utf-8 -*-
+
+VERSION = (4, 6, 0)
+__version__ = '.'.join(map(str, VERSION))
+
+
+def get_redis_connection(alias='default', write=True):
+ """
+ Helper used for obtaining a raw redis client.
+ """
+
+ from django.core.cache import caches
+
+ cache = caches[alias]
+
+ if not hasattr(cache, "client"):
+ raise NotImplementedError("This backend does not support this feature")
+
+ if not hasattr(cache.client, "get_client"):
+ raise NotImplementedError("This backend does not support this feature")
+
+ return cache.client.get_client(write)
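The get_redis_connection() helper above exposes the underlying redis-py client for a configured cache alias; a short usage sketch, assuming a cache alias named "default" and illustrative key names:

    # Obtain the raw redis-py client behind the "default" cache alias and
    # issue native commands that the Django cache API does not expose.
    from django_redis import get_redis_connection

    con = get_redis_connection("default")    # write-capable client by default
    con.lpush("job-queue", "task-1")         # any redis-py method is available
    print(con.ttl("job-queue"))              # note: keys here bypass make_key()
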
diff --git a/django_redis/cache.py b/django_redis/cache.py
new file mode 100644
index 0000000..d584ae2
--- /dev/null
+++ b/django_redis/cache.py
@@ -0,0 +1,151 @@
+import functools
+import warnings
+import logging
+
+from django.conf import settings
+from django.core.cache.backends.base import BaseCache
+
+from .util import load_class
+from .exceptions import ConnectionInterrupted
+
+DJANGO_REDIS_IGNORE_EXCEPTIONS = getattr(settings, "DJANGO_REDIS_IGNORE_EXCEPTIONS", False)
+DJANGO_REDIS_LOG_IGNORED_EXCEPTIONS = getattr(settings, "DJANGO_REDIS_LOG_IGNORED_EXCEPTIONS", False)
+DJANGO_REDIS_LOGGER = getattr(settings, "DJANGO_REDIS_LOGGER", False)
+
+
+if DJANGO_REDIS_LOG_IGNORED_EXCEPTIONS:
+ logger = logging.getLogger((DJANGO_REDIS_LOGGER or __name__))
+
+
+def omit_exception(method=None, return_value=None):
+ """
+    Simple decorator that intercepts connection errors and
+    ignores them when the settings say to do so.
+ """
+
+ if method is None:
+ return functools.partial(omit_exception, return_value=return_value)
+
+ @functools.wraps(method)
+ def _decorator(self, *args, **kwargs):
+ try:
+ return method(self, *args, **kwargs)
+ except ConnectionInterrupted as e:
+ if self._ignore_exceptions:
+ if DJANGO_REDIS_LOG_IGNORED_EXCEPTIONS:
+ logger.error(str(e))
+
+ return return_value
+ raise e.parent
+ return _decorator
+
+
+class RedisCache(BaseCache):
+ def __init__(self, server, params):
+ super(RedisCache, self).__init__(params)
+ self._server = server
+ self._params = params
+
+ options = params.get("OPTIONS", {})
+ self._client_cls = options.get("CLIENT_CLASS", "django_redis.client.DefaultClient")
+ self._client_cls = load_class(self._client_cls)
+ self._client = None
+
+ self._ignore_exceptions = options.get("IGNORE_EXCEPTIONS", DJANGO_REDIS_IGNORE_EXCEPTIONS)
+
+ @property
+ def client(self):
+ """
+ Lazy client connection property.
+ """
+ if self._client is None:
+ self._client = self._client_cls(self._server, self._params, self)
+ return self._client
+
+ @omit_exception
+ def set(self, *args, **kwargs):
+ return self.client.set(*args, **kwargs)
+
+ @omit_exception
+ def incr_version(self, *args, **kwargs):
+ return self.client.incr_version(*args, **kwargs)
+
+ @omit_exception
+ def add(self, *args, **kwargs):
+ return self.client.add(*args, **kwargs)
+
+ @omit_exception
+ def get(self, key, default=None, version=None, client=None):
+ try:
+ return self.client.get(key, default=default, version=version,
+ client=client)
+ except ConnectionInterrupted as e:
+ if DJANGO_REDIS_IGNORE_EXCEPTIONS or self._ignore_exceptions:
+ if DJANGO_REDIS_LOG_IGNORED_EXCEPTIONS:
+ logger.error(str(e))
+ return default
+ raise
+
+ @omit_exception
+ def delete(self, *args, **kwargs):
+ return self.client.delete(*args, **kwargs)
+
+ @omit_exception
+ def delete_pattern(self, *args, **kwargs):
+ return self.client.delete_pattern(*args, **kwargs)
+
+ @omit_exception
+ def delete_many(self, *args, **kwargs):
+ return self.client.delete_many(*args, **kwargs)
+
+ @omit_exception
+ def clear(self):
+ return self.client.clear()
+
+ @omit_exception(return_value={})
+ def get_many(self, *args, **kwargs):
+ return self.client.get_many(*args, **kwargs)
+
+ @omit_exception
+ def set_many(self, *args, **kwargs):
+ return self.client.set_many(*args, **kwargs)
+
+ @omit_exception
+ def incr(self, *args, **kwargs):
+ return self.client.incr(*args, **kwargs)
+
+ @omit_exception
+ def decr(self, *args, **kwargs):
+ return self.client.decr(*args, **kwargs)
+
+ @omit_exception
+ def has_key(self, *args, **kwargs):
+ return self.client.has_key(*args, **kwargs)
+
+ @omit_exception
+ def keys(self, *args, **kwargs):
+ return self.client.keys(*args, **kwargs)
+
+ @omit_exception
+ def iter_keys(self, *args, **kwargs):
+ return self.client.iter_keys(*args, **kwargs)
+
+ @omit_exception
+ def ttl(self, *args, **kwargs):
+ return self.client.ttl(*args, **kwargs)
+
+ @omit_exception
+ def persist(self, *args, **kwargs):
+ return self.client.persist(*args, **kwargs)
+
+ @omit_exception
+ def expire(self, *args, **kwargs):
+ return self.client.expire(*args, **kwargs)
+
+ @omit_exception
+ def lock(self, *args, **kwargs):
+ return self.client.lock(*args, **kwargs)
+
+ @omit_exception
+ def close(self, **kwargs):
+ self.client.close(**kwargs)
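Every public method of RedisCache above is wrapped with omit_exception, so connection errors can be swallowed instead of raised. A sketch of the settings that control this, using the setting names read at the top of cache.py (the logger name and redis URL are illustrative):

    # settings.py -- degrade gracefully when redis is unreachable
    DJANGO_REDIS_IGNORE_EXCEPTIONS = True          # global switch read by cache.py
    DJANGO_REDIS_LOG_IGNORED_EXCEPTIONS = True     # log each swallowed error
    DJANGO_REDIS_LOGGER = "myproject.cache"        # optional logger name (illustrative)

    # ...or per cache, through OPTIONS:
    CACHES = {
        "default": {
            "BACKEND": "django_redis.cache.RedisCache",
            "LOCATION": "redis://127.0.0.1:6379/1",
            "OPTIONS": {"IGNORE_EXCEPTIONS": True},
        }
    }

With these enabled, a failed cache.get() returns the supplied default instead of raising ConnectionInterrupted.
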
diff --git a/django_redis/client/__init__.py b/django_redis/client/__init__.py
new file mode 100644
index 0000000..42a1299
--- /dev/null
+++ b/django_redis/client/__init__.py
@@ -0,0 +1,8 @@
+from .default import DefaultClient
+from .sharded import ShardClient
+from .herd import HerdClient
+
+
+__all__ = ["DefaultClient",
+ "ShardClient",
+ "HerdClient"]
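Which of these three client implementations is used is chosen per cache via OPTIONS["CLIENT_CLASS"] (cache.py above defaults to DefaultClient). A sketch of a sharded setup, with illustrative node addresses:

    # settings.py -- sketch of a sharded cache using ShardClient
    CACHES = {
        "default": {
            "BACKEND": "django_redis.cache.RedisCache",
            "LOCATION": [
                "redis://127.0.0.1:6379/1",
                "redis://127.0.0.1:6380/1",
            ],
            "OPTIONS": {
                "CLIENT_CLASS": "django_redis.client.ShardClient",
            },
        }
    }

ShardClient distributes keys across the listed nodes using the hash ring in hash_ring.py; HerdClient can be selected the same way to mitigate thundering-herd expirations.
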
diff --git a/django_redis/client/default.py b/django_redis/client/default.py
new file mode 100644
index 0000000..e97b1c1
--- /dev/null
+++ b/django_redis/client/default.py
@@ -0,0 +1,510 @@
+# -*- coding: utf-8 -*-
+
+from __future__ import absolute_import, unicode_literals
+
+import random
+import socket
+import warnings
+import zlib
+from collections import OrderedDict
+
+from django.conf import settings
+from django.core.cache.backends.base import DEFAULT_TIMEOUT, get_key_func
+from django.core.exceptions import ImproperlyConfigured
+from django.utils.encoding import smart_text
+
+from redis.exceptions import ConnectionError
+from redis.exceptions import ResponseError
+
+# Compatibility with redis-py 2.10.x+
+
+try:
+ from redis.exceptions import TimeoutError, ResponseError
+ _main_exceptions = (TimeoutError, ResponseError, ConnectionError, socket.timeout)
+except ImportError:
+ _main_exceptions = (ConnectionError, socket.timeout)
+
+from ..util import CacheKey, load_class, integer_types
+from ..exceptions import ConnectionInterrupted, CompressorError
+from .. import pool
+
+
+class DefaultClient(object):
+ def __init__(self, server, params, backend):
+ self._backend = backend
+ self._server = server
+ self._params = params
+
+ self.reverse_key = get_key_func(params.get("REVERSE_KEY_FUNCTION") or
+ "django_redis.util.default_reverse_key")
+
+ if not self._server:
+            raise ImproperlyConfigured("Missing connection string")
+
+ if not isinstance(self._server, (list, tuple, set)):
+ self._server = self._server.split(",")
+
+ self._clients = [None] * len(self._server)
+ self._options = params.get("OPTIONS", {})
+
+ serializer_path = self._options.get("SERIALIZER", "django_redis.serializers.pickle.PickleSerializer")
+ serializer_cls = load_class(serializer_path)
+
+ compressor_path = self._options.get("COMPRESSOR", "django_redis.compressors.identity.IdentityCompressor")
+ compressor_cls = load_class(compressor_path)
+
+ self._serializer = serializer_cls(options=self._options)
+        self._compressor = compressor_cls(options=self._options)
+
+ self.connection_factory = pool.get_connection_factory(options=self._options)
+
+ def __contains__(self, key):
+ return self.has_key(key)
+
+ def get_next_client_index(self, write=True):
+ """
+        Return the index of the next client to use for reads.
+        This implements the default behavior for picking a read
+        client in a master-slave setup.
+
+        Override this method if you need different behavior.
+ """
+ if write or len(self._server) == 1:
+ return 0
+
+ return random.randint(1, len(self._server) - 1)
+
+ def get_client(self, write=True):
+ """
+        Method used to obtain a raw redis client.
+
+        This function is used by almost all cache backend
+        operations to obtain a native redis client/connection
+        instance.
+ """
+ index = self.get_next_client_index(write=write)
+
+ if self._clients[index] is None:
+ self._clients[index] = self.connect(index)
+
+ return self._clients[index]
+
+ def connect(self, index=0):
+ """
+        Given a connection index, returns a new raw redis client/connection
+        instance. The index is used for master/slave setups and indicates
+        which connection string should be used. In normal setups, index is 0.
+ """
+ return self.connection_factory.connect(self._server[index])
+
+ def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None, client=None, nx=False, xx=False):
+ """
+        Persist a value in the cache, with an optional expiration time.
+        Also supports an optional nx parameter: if set to True, the key is
+        only written if it does not already exist (redis SETNX semantics).
+ """
+
+ if not client:
+ client = self.get_client(write=True)
+
+ nkey = self.make_key(key, version=version)
+ nvalue = self.encode(value)
+
+ if timeout is True:
+            warnings.warn("Using True as a timeout value is now deprecated.", DeprecationWarning)
+ timeout = self._backend.default_timeout
+
+ if timeout == DEFAULT_TIMEOUT:
+ timeout = self._backend.default_timeout
+
+ try:
+ if timeout is not None:
+ if timeout > 0:
+ # Convert to milliseconds
+ timeout = int(timeout * 1000)
+ elif timeout <= 0:
+ if nx:
+                        # A negative timeout with nx=True should not expire
+                        # (in our case, delete) the value if it already exists.
+                        # Expiring a non-existent value is a no-op anyway.
+ timeout = None
+ else:
+                        # redis doesn't support negative timeouts in the ex/px
+                        # flags, so it is better to just delete the key than to
+                        # set it and then expire it in a pipeline
+ return self.delete(key, client=client, version=version)
+
+ return client.set(nkey, nvalue, nx=nx, px=timeout, xx=xx)
+ except _main_exceptions as e:
+ raise ConnectionInterrupted(connection=client, parent=e)
+
+ def incr_version(self, key, delta=1, version=None, client=None):
+ """
+ Adds delta to the cache version for the supplied key. Returns the
+ new version.
+ """
+
+ if client is None:
+ client = self.get_client(write=True)
+
+ if version is None:
+ version = self._backend.version
+
+ old_key = self.make_key(key, version)
+ value = self.get(old_key, version=version, client=client)
+
+ try:
+ ttl = client.ttl(old_key)
+ except _main_exceptions as e:
+ raise ConnectionInterrupted(connection=client, parent=e)
+
+ if value is None:
+ raise ValueError("Key '%s' not found" % key)
+
+ if isinstance(key, CacheKey):
+ new_key = self.make_key(key.original_key(), version=version + delta)
+ else:
+ new_key = self.make_key(key, version=version + delta)
+
+ self.set(new_key, value, timeout=ttl, client=client)
+ self.delete(old_key, client=client)
+ return version + delta
+
+ def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None, client=None):
+ """
+ Add a value to the cache, failing if the key already exists.
+
+ Returns ``True`` if the object was added, ``False`` if not.
+ """
+ return self.set(key, value, timeout, version=version, client=client, nx=True)
+
+ def get(self, key, default=None, version=None, client=None):
+ """
+ Retrieve a value from the cache.
+
+        Returns the decoded value if the key is found, or the default if not.
+ """
+ if client is None:
+ client = self.get_client(write=False)
+
+ key = self.make_key(key, version=version)
+
+ try:
+ value = client.get(key)
+ except _main_exceptions as e:
+ raise ConnectionInterrupted(connection=client, parent=e)
+
+ if value is None:
+ return default
+
+ return self.decode(value)
+
+ def persist(self, key, version=None, client=None):
+ if client is None:
+ client = self.get_client(write=True)
+
+ key = self.make_key(key, version=version)
+
+ if client.exists(key):
+ client.persist(key)
+
+ def expire(self, key, timeout, version=None, client=None):
+ if client is None:
+ client = self.get_client(write=True)
+
+ key = self.make_key(key, version=version)
+
+ if client.exists(key):
+ client.expire(key, timeout)
+
+ def lock(self, key, version=None, timeout=None, sleep=0.1,
+ blocking_timeout=None, client=None):
+ if client is None:
+ client = self.get_client(write=True)
+
+ key = self.make_key(key, version=version)
+ return client.lock(key, timeout=timeout, sleep=sleep,
+ blocking_timeout=blocking_timeout)
+
+ def delete(self, key, version=None, prefix=None, client=None):
+ """
+ Remove a key from the cache.
+ """
+ if client is None:
+ client = self.get_client(write=True)
+
+ try:
+ return client.delete(self.make_key(key, version=version,
+ prefix=prefix))
+ except _main_exceptions as e:
+ raise ConnectionInterrupted(connection=client, parent=e)
+
+ def delete_pattern(self, pattern, version=None, prefix=None, client=None):
+ """
+ Remove all keys matching pattern.
+ """
+
+ if client is None:
+ client = self.get_client(write=True)
+
+ pattern = self.make_key(pattern, version=version, prefix=prefix)
+ try:
+ count = 0
+ for key in client.scan_iter(pattern):
+ client.delete(key)
+ count += 1
+ return count
+ except _main_exceptions as e:
+ raise ConnectionInterrupted(connection=client, parent=e)
+
+ def delete_many(self, keys, version=None, client=None):
+ """
+ Remove multiple keys at once.
+ """
+
+ if client is None:
+ client = self.get_client(write=True)
+
+ keys = [self.make_key(k, version=version) for k in keys]
+
+ if not keys:
+ return
+
+ try:
+ return client.delete(*keys)
+ except _main_exceptions as e:
+ raise ConnectionInterrupted(connection=client, parent=e)
+
+ def clear(self, client=None):
+ """
+ Flush all cache keys.
+ """
+
+ if client is None:
+ client = self.get_client(write=True)
+
+ try:
+ count = 0
+ for key in client.scan_iter("*"):
+ client.delete(key)
+ count += 1
+ return count
+ except _main_exceptions as e:
+ raise ConnectionInterrupted(connection=client, parent=e)
+
+ def decode(self, value):
+ """
+ Decode the given value.
+ """
+ try:
+ value = int(value)
+ except (ValueError, TypeError):
+ try:
+ value = self._compressor.decompress(value)
+ except CompressorError:
+                # Handle small values that were stored uncompressed
+ pass
+ value = self._serializer.loads(value)
+ return value
+
+ def encode(self, value):
+ """
+ Encode the given value.
+ """
+
+ if isinstance(value, bool) or not isinstance(value, integer_types):
+ value = self._serializer.dumps(value)
+ value = self._compressor.compress(value)
+ return value
+
+ return value
+
+ def get_many(self, keys, version=None, client=None):
+ """
+ Retrieve many keys.
+ """
+
+ if client is None:
+ client = self.get_client(write=False)
+
+ if not keys:
+ return {}
+
+ recovered_data = OrderedDict()
+
+ new_keys = [self.make_key(k, version=version) for k in keys]
+ map_keys = dict(zip(new_keys, keys))
+
+ try:
+ results = client.mget(*new_keys)
+ except _main_exceptions as e:
+ raise ConnectionInterrupted(connection=client, parent=e)
+
+ for key, value in zip(new_keys, results):
+ if value is None:
+ continue
+ recovered_data[map_keys[key]] = self.decode(value)
+ return recovered_data
+
+ def set_many(self, data, timeout=DEFAULT_TIMEOUT, version=None, client=None):
+ """
+ Set a bunch of values in the cache at once from a dict of key/value
+ pairs. This is much more efficient than calling set() multiple times.
+
+ If timeout is given, that timeout will be used for the key; otherwise
+ the default cache timeout will be used.
+ """
+ if client is None:
+ client = self.get_client(write=True)
+
+ try:
+ pipeline = client.pipeline()
+ for key, value in data.items():
+ self.set(key, value, timeout, version=version, client=pipeline)
+ pipeline.execute()
+ except _main_exceptions as e:
+ raise ConnectionInterrupted(connection=client, parent=e)
+
+ def _incr(self, key, delta=1, version=None, client=None):
+ if client is None:
+ client = self.get_client(write=True)
+
+ key = self.make_key(key, version=version)
+
+ try:
+ try:
+                # If the key expires right after an exists check, we would get
+                # a key with the wrong value and a ttl of -1, so a lua script
+                # is used for atomicity.
+ lua = """
+ local exists = redis.call('EXISTS', KEYS[1])
+ if (exists == 1) then
+ return redis.call('INCRBY', KEYS[1], ARGV[1])
+ else return false end
+ """
+ value = client.eval(lua, 1, key, delta)
+ if value is None:
+ raise ValueError("Key '%s' not found" % key)
+ except ResponseError:
+                # Redis raises ResponseError when the cached value or the
+                # resulting total exceeds a 64-bit signed integer, or when the
+                # integer was serialized so redis sees the data as a string.
+
+                # Try to keep the TTL of the key.
+
+ timeout = client.ttl(key)
+                # ttl returns -2 if the key does not exist,
+                # which means the key has expired
+ if timeout == -2:
+ raise ValueError("Key '%s' not found" % key)
+ value = self.get(key, version=version, client=client) + delta
+ self.set(key, value, version=version, timeout=timeout,
+ client=client)
+ except _main_exceptions as e:
+ raise ConnectionInterrupted(connection=client, parent=e)
+
+ return value
+
+ def incr(self, key, delta=1, version=None, client=None):
+ """
+ Add delta to value in the cache. If the key does not exist, raise a
+ ValueError exception.
+ """
+ return self._incr(key=key, delta=delta, version=version, client=client)
+
+ def decr(self, key, delta=1, version=None, client=None):
+ """
+        Subtract delta from the value in the cache. If the key does not exist, raise a
+ ValueError exception.
+ """
+ return self._incr(key=key, delta=-delta, version=version,
+ client=client)
+
+ def ttl(self, key, version=None, client=None):
+ """
+        Execute the redis TTL command and return the "time-to-live" of the specified key.
+        If the key is not volatile (has no expiration), return None.
+ """
+ if client is None:
+ client = self.get_client(write=False)
+
+ key = self.make_key(key, version=version)
+ if not client.exists(key):
+ return 0
+
+ t = client.ttl(key)
+
+ if t >= 0:
+ return t
+ elif t == -1:
+ return None
+ elif t == -2:
+ return 0
+ else:
+ # Should never reach here
+ return None
+
+ def has_key(self, key, version=None, client=None):
+ """
+ Test if key exists.
+ """
+
+ if client is None:
+ client = self.get_client(write=False)
+
+ key = self.make_key(key, version=version)
+ try:
+ return client.exists(key)
+ except _main_exceptions as e:
+ raise ConnectionInterrupted(connection=client, parent=e)
+
+ def iter_keys(self, search, itersize=None, client=None, version=None):
+ """
+        Same as keys, but uses redis >= 2.8 cursors
+        for memory-efficient key iteration.
+ """
+
... 6120 lines suppressed ...
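The DefaultClient methods shown above (and in the portion suppressed here) back the extra operations django-redis adds to the standard Django cache object; a short usage sketch with illustrative keys and timeouts:

    from django.core.cache import cache

    cache.set("token", "abc", timeout=5, nx=True)    # SET ... NX PX via DefaultClient.set()
    print(cache.ttl("token"))                        # seconds remaining, None if persistent
    cache.persist("token")                           # drop the expiration
    cache.delete_pattern("session:*")                # SCAN + DELETE via delete_pattern()

    with cache.lock("build-lock", timeout=10):       # redis-py lock via DefaultClient.lock()
        pass                                         # critical section
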
--
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/python-modules/packages/django-redis.git