[Python-modules-commits] r31332 - in packages (34 files)
chrisk-guest at users.alioth.debian.org
chrisk-guest at users.alioth.debian.org
Fri Oct 31 18:52:11 UTC 2014
Date: Friday, October 31, 2014 @ 18:52:10
Author: chrisk-guest
Revision: 31332
[svn-inject] Installing original source of python-cachetools (0.6.0)
Added:
packages/branches/
packages/branches/upstream/
packages/branches/upstream/python-cachetools/
packages/branches/upstream/python-cachetools/current/
packages/branches/upstream/python-cachetools/current/Changes
packages/branches/upstream/python-cachetools/current/LICENSE
packages/branches/upstream/python-cachetools/current/MANIFEST.in
packages/branches/upstream/python-cachetools/current/PKG-INFO
packages/branches/upstream/python-cachetools/current/README.rst
packages/branches/upstream/python-cachetools/current/cachetools.egg-info/
packages/branches/upstream/python-cachetools/current/cachetools.egg-info/PKG-INFO
packages/branches/upstream/python-cachetools/current/cachetools.egg-info/SOURCES.txt
packages/branches/upstream/python-cachetools/current/cachetools.egg-info/dependency_links.txt
packages/branches/upstream/python-cachetools/current/cachetools.egg-info/top_level.txt
packages/branches/upstream/python-cachetools/current/cachetools/
packages/branches/upstream/python-cachetools/current/cachetools/__init__.py
packages/branches/upstream/python-cachetools/current/cachetools/cache.py
packages/branches/upstream/python-cachetools/current/cachetools/decorators.py
packages/branches/upstream/python-cachetools/current/cachetools/lfucache.py
packages/branches/upstream/python-cachetools/current/cachetools/link.py
packages/branches/upstream/python-cachetools/current/cachetools/lock.py
packages/branches/upstream/python-cachetools/current/cachetools/lrucache.py
packages/branches/upstream/python-cachetools/current/cachetools/rrcache.py
packages/branches/upstream/python-cachetools/current/cachetools/ttlcache.py
packages/branches/upstream/python-cachetools/current/setup.cfg
packages/branches/upstream/python-cachetools/current/setup.py
packages/branches/upstream/python-cachetools/current/tests/
packages/branches/upstream/python-cachetools/current/tests/__init__.py
packages/branches/upstream/python-cachetools/current/tests/test_cache.py
packages/branches/upstream/python-cachetools/current/tests/test_cachedmethod.py
packages/branches/upstream/python-cachetools/current/tests/test_lfucache.py
packages/branches/upstream/python-cachetools/current/tests/test_lrucache.py
packages/branches/upstream/python-cachetools/current/tests/test_rrcache.py
packages/branches/upstream/python-cachetools/current/tests/test_ttlcache.py
Added: packages/branches/upstream/python-cachetools/current/Changes
===================================================================
--- packages/branches/upstream/python-cachetools/current/Changes (rev 0)
+++ packages/branches/upstream/python-cachetools/current/Changes 2014-10-31 18:52:10 UTC (rev 31332)
@@ -0,0 +1,58 @@
+0.5.1 2014-09-25
+----------------
+
+- No formatting of `KeyError` arguments.
+
+- Update `README.rst`.
+
+
+0.5.0 2014-09-23
+----------------
+
+- Do not delete expired items in TTLCache.__getitem__().
+
+- Add `@ttl_cache` function decorator.
+
+- Fix public `getsizeof()` usage.
+
+
+0.4.0 2014-06-16
+----------------
+
+- Add `TTLCache`.
+
+- Add `Cache` base class.
+
+- Remove `@cachedmethod` `lock` parameter.
+
+
+0.3.1 2014-05-07
+----------------
+
+- Add proper locking for `cache_clear()` and `cache_info()`.
+
+- Report `size` in `cache_info()`.
+
+
+0.3.0 2014-05-06
+----------------
+
+- Remove `@cache` decorator.
+
+- Add `size`, `getsizeof` members.
+
+- Add `@cachedmethod` decorator.
+
+
+0.2.0 2014-04-02
+----------------
+
+- Add `@cache` decorator.
+
+- Update documentation.
+
+
+0.1.0 2014-03-27
+----------------
+
+- Initial release.
Added: packages/branches/upstream/python-cachetools/current/LICENSE
===================================================================
--- packages/branches/upstream/python-cachetools/current/LICENSE (rev 0)
+++ packages/branches/upstream/python-cachetools/current/LICENSE 2014-10-31 18:52:10 UTC (rev 31332)
@@ -0,0 +1,20 @@
+The MIT License (MIT)
+
+Copyright (c) 2014 Thomas Kemmer
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
Added: packages/branches/upstream/python-cachetools/current/MANIFEST.in
===================================================================
--- packages/branches/upstream/python-cachetools/current/MANIFEST.in (rev 0)
+++ packages/branches/upstream/python-cachetools/current/MANIFEST.in 2014-10-31 18:52:10 UTC (rev 31332)
@@ -0,0 +1,6 @@
+include Changes
+include LICENSE
+include MANIFEST.in
+include README.rst
+
+recursive-include tests *.py
Added: packages/branches/upstream/python-cachetools/current/PKG-INFO
===================================================================
--- packages/branches/upstream/python-cachetools/current/PKG-INFO (rev 0)
+++ packages/branches/upstream/python-cachetools/current/PKG-INFO 2014-10-31 18:52:10 UTC (rev 31332)
@@ -0,0 +1,111 @@
+Metadata-Version: 1.1
+Name: cachetools
+Version: 0.6.0
+Summary: Extensible memoizing collections and decorators
+Home-page: https://github.com/tkem/cachetools
+Author: Thomas Kemmer
+Author-email: tkemmer at computer.org
+License: MIT
+Description: cachetools
+ ========================================================================
+
+ This module provides various memoizing collections and decorators,
+ including a variant of the Python 3 Standard Library
+ ``functools.lru_cache`` function decorator.
+
+ .. code-block:: pycon
+
+ >>> from cachetools import LRUCache
+ >>> cache = LRUCache(maxsize=2)
+ >>> cache.update([('first', 1), ('second', 2)])
+ >>> cache
+ LRUCache([('second', 2), ('first', 1)], maxsize=2, currsize=2)
+ >>> cache['third'] = 3
+ >>> cache
+ LRUCache([('second', 2), ('third', 3)], maxsize=2, currsize=2)
+ >>> cache['second']
+ 2
+ >>> cache['fourth'] = 4
+ >>> cache
+ LRUCache([('second', 2), ('fourth', 4)], maxsize=2, currsize=2)
+
+
+ For the purpose of this module, a *cache* is a mutable_ mapping_ of a
+ fixed maximum size. When the cache is full, i.e. the size of the
+ cache would exceed its maximum size, the cache must choose which
+ item(s) to discard based on a suitable `cache algorithm`_. A cache's
+ size is the sum of the size of its items, and an item's size in
+ general is a property or function of its value, e.g. the result of
+ ``sys.getsizeof``, or ``len`` for string and sequence values.
+
+ This module provides various cache implementations based on different
+ cache algorithms, as well as decorators for easily memoizing function
+ and method calls.
+
+
+ Installation
+ ------------------------------------------------------------------------
+
+ Install cachetools using pip::
+
+ pip install cachetools
+
+
+ Project Resources
+ ------------------------------------------------------------------------
+
+ .. image:: http://img.shields.io/pypi/v/cachetools.svg?style=flat
+ :target: https://pypi.python.org/pypi/cachetools/
+ :alt: Latest PyPI version
+
+ .. image:: http://img.shields.io/pypi/dm/cachetools.svg?style=flat
+ :target: https://pypi.python.org/pypi/cachetools/
+ :alt: Number of PyPI downloads
+
+ .. image:: http://img.shields.io/travis/tkem/cachetools.svg?style=flat
+ :target: https://travis-ci.org/tkem/cachetools/
+ :alt: Travis CI build status
+
+ .. image:: http://img.shields.io/coveralls/tkem/cachetools.svg?style=flat
+ :target: https://coveralls.io/r/tkem/cachetools
+ :alt: Test coverage
+
+ - `Documentation`_
+ - `Issue Tracker`_
+ - `Source Code`_
+ - `Change Log`_
+
+
+ License
+ ------------------------------------------------------------------------
+
+ Copyright (c) 2014 Thomas Kemmer.
+
+ Licensed under the `MIT License`_.
+
+
+ .. _functools.lru_cache: http://docs.python.org/3.4/library/functools.html#functools.lru_cache
+ .. _mutable: http://docs.python.org/dev/glossary.html#term-mutable
+ .. _mapping: http://docs.python.org/dev/glossary.html#term-mapping
+ .. _cache algorithm: http://en.wikipedia.org/wiki/Cache_algorithms
+
+ .. _Documentation: http://pythonhosted.org/cachetools/
+ .. _Issue Tracker: https://github.com/tkem/cachetools/issues/
+ .. _Source Code: https://github.com/tkem/cachetools/
+ .. _Change Log: http://raw.github.com/tkem/cachetools/master/Changes
+ .. _MIT License: http://raw.github.com/tkem/cachetools/master/LICENSE
+
+Keywords: cache caching LRU LFU TTL
+Platform: UNKNOWN
+Classifier: Development Status :: 4 - Beta
+Classifier: Environment :: Other Environment
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.2
+Classifier: Programming Language :: Python :: 3.3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
Added: packages/branches/upstream/python-cachetools/current/README.rst
===================================================================
--- packages/branches/upstream/python-cachetools/current/README.rst (rev 0)
+++ packages/branches/upstream/python-cachetools/current/README.rst 2014-10-31 18:52:10 UTC (rev 31332)
@@ -0,0 +1,87 @@
+cachetools
+========================================================================
+
+This module provides various memoizing collections and decorators,
+including a variant of the Python 3 Standard Library
+``functools.lru_cache`` function decorator.
+
+.. code-block:: pycon
+
+ >>> from cachetools import LRUCache
+ >>> cache = LRUCache(maxsize=2)
+ >>> cache.update([('first', 1), ('second', 2)])
+ >>> cache
+ LRUCache([('second', 2), ('first', 1)], maxsize=2, currsize=2)
+ >>> cache['third'] = 3
+ >>> cache
+ LRUCache([('second', 2), ('third', 3)], maxsize=2, currsize=2)
+ >>> cache['second']
+ 2
+ >>> cache['fourth'] = 4
+ >>> cache
+ LRUCache([('second', 2), ('fourth', 4)], maxsize=2, currsize=2)
+
+
+For the purpose of this module, a *cache* is a mutable_ mapping_ of a
+fixed maximum size. When the cache is full, i.e. the size of the
+cache would exceed its maximum size, the cache must choose which
+item(s) to discard based on a suitable `cache algorithm`_. A cache's
+size is the sum of the size of its items, and an item's size in
+general is a property or function of its value, e.g. the result of
+``sys.getsizeof``, or ``len`` for string and sequence values.
+
+This module provides various cache implementations based on different
+cache algorithms, as well as decorators for easily memoizing function
+and method calls.
+
+
+Installation
+------------------------------------------------------------------------
+
+Install cachetools using pip::
+
+ pip install cachetools
+
+
+Project Resources
+------------------------------------------------------------------------
+
+.. image:: http://img.shields.io/pypi/v/cachetools.svg?style=flat
+ :target: https://pypi.python.org/pypi/cachetools/
+ :alt: Latest PyPI version
+
+.. image:: http://img.shields.io/pypi/dm/cachetools.svg?style=flat
+ :target: https://pypi.python.org/pypi/cachetools/
+ :alt: Number of PyPI downloads
+
+.. image:: http://img.shields.io/travis/tkem/cachetools.svg?style=flat
+ :target: https://travis-ci.org/tkem/cachetools/
+ :alt: Travis CI build status
+
+.. image:: http://img.shields.io/coveralls/tkem/cachetools.svg?style=flat
+ :target: https://coveralls.io/r/tkem/cachetools
+ :alt: Test coverage
+
+- `Documentation`_
+- `Issue Tracker`_
+- `Source Code`_
+- `Change Log`_
+
+
+License
+------------------------------------------------------------------------
+
+Copyright (c) 2014 Thomas Kemmer.
+
+Licensed under the `MIT License`_.
+
+
+.. _functools.lru_cache: http://docs.python.org/3.4/library/functools.html#functools.lru_cache
+.. _mutable: http://docs.python.org/dev/glossary.html#term-mutable
+.. _mapping: http://docs.python.org/dev/glossary.html#term-mapping
+.. _cache algorithm: http://en.wikipedia.org/wiki/Cache_algorithms
+
+.. _Documentation: http://pythonhosted.org/cachetools/
+.. _Issue Tracker: https://github.com/tkem/cachetools/issues/
+.. _Source Code: https://github.com/tkem/cachetools/
+.. _Change Log: http://raw.github.com/tkem/cachetools/master/Changes
+.. _MIT License: http://raw.github.com/tkem/cachetools/master/LICENSE
Added: packages/branches/upstream/python-cachetools/current/cachetools/__init__.py
===================================================================
--- packages/branches/upstream/python-cachetools/current/cachetools/__init__.py (rev 0)
+++ packages/branches/upstream/python-cachetools/current/cachetools/__init__.py 2014-10-31 18:52:10 UTC (rev 31332)
@@ -0,0 +1,17 @@
"""Extensible memoizing collections and decorators"""

# Re-export the public cache classes and decorators so callers can
# simply write ``from cachetools import LRUCache, lru_cache``.
from .cache import Cache
from .rrcache import RRCache, rr_cache
from .lfucache import LFUCache, lfu_cache
from .lrucache import LRUCache, lru_cache
from .ttlcache import TTLCache, ttl_cache
from .decorators import cachedmethod

# Explicit public API of the package.
__all__ = (
    'Cache',
    'RRCache', 'LFUCache', 'LRUCache', 'TTLCache',
    'rr_cache', 'lfu_cache', 'lru_cache', 'ttl_cache',
    'cachedmethod'
)

# Package version; keep in sync with setup.py.
__version__ = '0.6.0'
Added: packages/branches/upstream/python-cachetools/current/cachetools/cache.py
===================================================================
--- packages/branches/upstream/python-cachetools/current/cachetools/cache.py (rev 0)
+++ packages/branches/upstream/python-cachetools/current/cachetools/cache.py 2014-10-31 18:52:10 UTC (rev 31332)
@@ -0,0 +1,77 @@
+import collections
+
+
try:
    # The ABCs moved to collections.abc in Python 3.3; the old
    # collections.MutableMapping alias was removed in Python 3.10.
    from collections.abc import MutableMapping
except ImportError:  # Python 2
    from collections import MutableMapping


class Cache(MutableMapping):
    """Mutable mapping to serve as a simple cache or cache base class.

    This class discards arbitrary items using :meth:`popitem` to make
    space when necessary.  Derived classes may override
    :meth:`popitem` to implement specific caching strategies.  If a
    subclass has to keep track of item access, insertion or deletion,
    it may additionally need to override :meth:`__getitem__`,
    :meth:`__setitem__` and :meth:`__delitem__`.  If a subclass has to
    keep meta data with its values, i.e. the `value` argument passed
    to :meth:`Cache.__setitem__` is different from what a user would
    regard as the cache's value, it will probably want to override
    :meth:`getsizeof`, too.

    """

    def __init__(self, maxsize, getsizeof=None):
        # Internal storage maps key -> (value, size); the size is
        # computed once, on insertion.
        self.__mapping = dict()
        self.__maxsize = maxsize
        # Default size function counts every item as 1, making
        # `maxsize` behave as a plain item count.
        self.__getsizeof = getsizeof or self.__one
        self.__currsize = 0

    def __repr__(self):
        return '%s(%r, maxsize=%d, currsize=%d)' % (
            self.__class__.__name__,
            list(self.items()),
            self.__maxsize,
            self.__currsize,
        )

    def __getitem__(self, key):
        return self.__mapping[key][0]

    def __setitem__(self, key, value):
        mapping = self.__mapping
        maxsize = self.__maxsize
        size = self.__getsizeof(value)
        if size > maxsize:
            # A single item larger than the whole cache can never fit.
            raise ValueError('value too large')
        if key not in mapping or mapping[key][1] < size:
            # Evict items (possibly including the one being replaced)
            # until the new value fits.
            while self.__currsize + size > maxsize:
                self.popitem()
        if key in mapping:
            self.__currsize -= mapping[key][1]
        mapping[key] = (value, size)
        self.__currsize += size

    def __delitem__(self, key):
        _, size = self.__mapping.pop(key)
        self.__currsize -= size

    def __iter__(self):
        return iter(self.__mapping)

    def __len__(self):
        return len(self.__mapping)

    @property
    def maxsize(self):
        """Return the maximum size of the cache."""
        return self.__maxsize

    @property
    def currsize(self):
        """Return the current size of the cache."""
        return self.__currsize

    def getsizeof(self, value):
        """Return the size of a cache element."""
        return self.__getsizeof(value)

    @staticmethod
    def __one(value):
        return 1
Added: packages/branches/upstream/python-cachetools/current/cachetools/decorators.py
===================================================================
--- packages/branches/upstream/python-cachetools/current/cachetools/decorators.py (rev 0)
+++ packages/branches/upstream/python-cachetools/current/cachetools/decorators.py 2014-10-31 18:52:10 UTC (rev 31332)
@@ -0,0 +1,94 @@
+import collections
+import contextlib # noqa
+import functools
+
try:
    # An empty contextlib.ExitStack (Python 3.3+) is a ready-made
    # no-op context manager; reuse it when no lock is configured.
    from contextlib import ExitStack as NullContext  # Python 3.3
except ImportError:
    # Minimal no-op context manager for older interpreters.
    class NullContext:
        def __enter__(self):
            pass

        def __exit__(self, exc_type, exc_val, exc_tb):
            pass

# Statistics tuple returned by a cached function's cache_info().
CacheInfo = collections.namedtuple('CacheInfo', 'hits misses maxsize currsize')

# Shared no-op context used when no lock factory is supplied.
nullcontext = NullContext()
+
+
def makekey_untyped(args, kwargs):
    """Return a hashable cache key for the given call arguments,
    ignoring argument types."""
    sorted_kwargs = tuple(sorted(kwargs.items()))
    return (args, sorted_kwargs)


def makekey_typed(args, kwargs):
    """Return a hashable cache key that additionally distinguishes
    the types of the call arguments (so e.g. 1 and 1.0 differ)."""
    base = makekey_untyped(args, kwargs)
    positional_types = tuple(type(value) for value in args)
    keyword_types = tuple(type(value) for _, value in sorted(kwargs.items()))
    return base + positional_types + keyword_types
+
+
def cachedfunc(cache, typed=False, lock=None):
    """Wrap a function with a memoizing callable backed by *cache*.

    *typed* selects type-sensitive cache keys; *lock*, if given, is a
    factory producing the context manager that guards cache access.
    """
    makekey = makekey_typed if typed else makekey_untyped
    guard = lock() if lock else nullcontext

    def decorator(func):
        # counters[0] counts cache hits, counters[1] cache misses.
        counters = [0, 0]

        def wrapper(*args, **kwargs):
            key = makekey(args, kwargs)
            with guard:
                try:
                    value = cache[key]
                except KeyError:
                    counters[1] += 1
                else:
                    counters[0] += 1
                    return value
            # Compute outside the guard so a slow function does not
            # block other users of the cache.
            value = func(*args, **kwargs)
            with guard:
                cache[key] = value
            return value

        def cache_info():
            with guard:
                hits, misses = counters
                maxsize = cache.maxsize
                currsize = cache.currsize
            return CacheInfo(hits, misses, maxsize, currsize)

        def cache_clear():
            with guard:
                cache.clear()

        wrapper.cache_info = cache_info
        wrapper.cache_clear = cache_clear
        return functools.update_wrapper(wrapper, func)

    return decorator
+
+
def cachedmethod(cache, typed=False):
    """Decorator to wrap a class or instance method with a memoizing
    callable that saves results in a (possibly shared) cache.

    """
    makekey = makekey_typed if typed else makekey_untyped

    def decorator(method):
        def wrapper(self, *args, **kwargs):
            mapping = cache(self)
            # A None mapping disables caching for this instance.
            if mapping is None:
                return method(self, *args, **kwargs)
            # Include the method itself in the key so several cached
            # methods may safely share one mapping.
            key = makekey((method,) + args, kwargs)
            try:
                return mapping[key]
            except KeyError:
                pass
            result = method(self, *args, **kwargs)
            mapping[key] = result
            return result

        wrapper.cache = cache
        return functools.update_wrapper(wrapper, method)

    return decorator
Added: packages/branches/upstream/python-cachetools/current/cachetools/lfucache.py
===================================================================
--- packages/branches/upstream/python-cachetools/current/cachetools/lfucache.py (rev 0)
+++ packages/branches/upstream/python-cachetools/current/cachetools/lfucache.py 2014-10-31 18:52:10 UTC (rev 31332)
@@ -0,0 +1,47 @@
+from .cache import Cache
+from .decorators import cachedfunc
+from .lock import RLock
+
+import operator
+
+
class LFUCache(Cache):
    """Least Frequently Used (LFU) cache implementation.

    This class counts how often an item is retrieved, and discards the
    items used least often to make space when necessary.

    """

    def __init__(self, maxsize, getsizeof=None):
        if getsizeof is None:
            Cache.__init__(self, maxsize)
        else:
            # Entries are stored as [value, use_count]; adapt the
            # user's size function to look at the value only.
            Cache.__init__(self, maxsize, lambda e: getsizeof(e[0]))
            # Shadow the inherited Cache.getsizeof method only when a
            # size function was actually supplied; assigning it
            # unconditionally would bind None here and break the
            # public getsizeof() API.
            self.getsizeof = getsizeof

    def __getitem__(self, key, cache_getitem=Cache.__getitem__):
        entry = cache_getitem(self, key)
        entry[1] += 1  # record this access for the LFU ordering
        return entry[0]

    def __setitem__(self, key, value, cache_setitem=Cache.__setitem__):
        # New entries start with a use count of zero.
        cache_setitem(self, key, [value, 0])

    def popitem(self):
        """Remove and return the `(key, value)` pair least frequently used."""
        # Linear scan over all entries; O(n) per eviction.
        items = ((key, Cache.__getitem__(self, key)[1]) for key in self)
        try:
            key, _ = min(items, key=operator.itemgetter(1))
        except ValueError:
            raise KeyError('cache is empty')
        return (key, self.pop(key))
+
+
def lfu_cache(maxsize=128, typed=False, getsizeof=None, lock=RLock):
    """Decorator to wrap a function with a memoizing callable that saves
    up to `maxsize` results based on a Least Frequently Used (LFU)
    algorithm.

    """
    # One LFUCache shared by all invocations of the decorated function;
    # `lock` is a factory (e.g. threading.RLock) guarding cache access.
    return cachedfunc(LFUCache(maxsize, getsizeof), typed, lock)
Added: packages/branches/upstream/python-cachetools/current/cachetools/link.py
===================================================================
--- packages/branches/upstream/python-cachetools/current/cachetools/link.py (rev 0)
+++ packages/branches/upstream/python-cachetools/current/cachetools/link.py 2014-10-31 18:52:10 UTC (rev 31332)
@@ -0,0 +1,10 @@
class Link(object):
    """Node of the doubly-linked lists that order cache entries."""

    # Fixed slots keep the many per-entry link objects small.
    __slots__ = ('prev', 'next', 'data')

    def unlink(self):
        """Splice this node out of its list and drop its references."""
        self.prev.next = self.next
        self.next.prev = self.prev
        del self.prev
        del self.next
Added: packages/branches/upstream/python-cachetools/current/cachetools/lock.py
===================================================================
--- packages/branches/upstream/python-cachetools/current/cachetools/lock.py (rev 0)
+++ packages/branches/upstream/python-cachetools/current/cachetools/lock.py 2014-10-31 18:52:10 UTC (rev 31332)
@@ -0,0 +1,6 @@
# flake8: noqa

# Prefer the real reentrant lock; fall back to the no-op dummy on
# interpreters built without thread support.
# NOTE(review): dummy_threading was removed in Python 3.9, but this
# fallback only runs when `threading` itself is unavailable.
try:
    from threading import RLock
except ImportError:
    from dummy_threading import RLock
Added: packages/branches/upstream/python-cachetools/current/cachetools/lrucache.py
===================================================================
--- packages/branches/upstream/python-cachetools/current/cachetools/lrucache.py (rev 0)
+++ packages/branches/upstream/python-cachetools/current/cachetools/lrucache.py 2014-10-31 18:52:10 UTC (rev 31332)
@@ -0,0 +1,84 @@
+from .cache import Cache
+from .decorators import cachedfunc
+from .link import Link
+from .lock import RLock
+
+
class LRUCache(Cache):
    """Least Recently Used (LRU) cache implementation.

    This class discards the least recently used items first to make
    space when necessary.  Use order is kept on a circular
    doubly-linked list with sentinel `self.__root`: the head
    (root.next) is the least recently used entry, the tail (root.prev)
    the most recently used one.

    """

    def __init__(self, maxsize, getsizeof=None):
        if getsizeof is None:
            Cache.__init__(self, maxsize)
        else:
            # Entries are stored as (value, link); adapt the user's
            # size function to look at the value only.
            Cache.__init__(self, maxsize, lambda e: getsizeof(e[0]))
            # Shadow the inherited Cache.getsizeof method only when a
            # size function was actually supplied; assigning it
            # unconditionally would bind None here and break the
            # public getsizeof() API.
            self.getsizeof = getsizeof
        root = Link()
        root.prev = root.next = root
        self.__root = root

    def __repr__(self, cache_getitem=Cache.__getitem__):
        # Use the base accessor so printing does not touch LRU order.
        return '%s(%r, maxsize=%d, currsize=%d)' % (
            self.__class__.__name__,
            [(key, cache_getitem(self, key)[0]) for key in self],
            self.maxsize,
            self.currsize,
        )

    def __getitem__(self, key, cache_getitem=Cache.__getitem__):
        value, link = cache_getitem(self, key)
        # Move the accessed entry's link to the tail (most recent).
        root = self.__root
        link.prev.next = link.next
        link.next.prev = link.prev
        link.prev = tail = root.prev
        link.next = root
        tail.next = root.prev = link
        return value

    def __setitem__(self, key, value,
                    cache_getitem=Cache.__getitem__,
                    cache_setitem=Cache.__setitem__):
        try:
            _, link = cache_getitem(self, key)
        except KeyError:
            link = Link()
        cache_setitem(self, key, (value, link))
        try:
            # Existing link: splice it out of its current position.
            link.prev.next = link.next
            link.next.prev = link.prev
        except AttributeError:
            # Fresh link: remember the key for popitem().
            link.data = key
        # Append the link at the tail (most recently used).
        root = self.__root
        link.prev = tail = root.prev
        link.next = root
        tail.next = root.prev = link

    def __delitem__(self, key,
                    cache_getitem=Cache.__getitem__,
                    cache_delitem=Cache.__delitem__):
        _, link = cache_getitem(self, key)
        cache_delitem(self, key)
        link.unlink()

    def popitem(self):
        """Remove and return the `(key, value)` pair least recently used."""
        root = self.__root
        link = root.next
        if link is root:
            raise KeyError('cache is empty')
        key = link.data
        return (key, self.pop(key))
+
+
def lru_cache(maxsize=128, typed=False, getsizeof=None, lock=RLock):
    """Decorator to wrap a function with a memoizing callable that saves
    up to `maxsize` results based on a Least Recently Used (LRU)
    algorithm.

    """
    # One LRUCache shared by all invocations of the decorated function;
    # `lock` is a factory (e.g. threading.RLock) guarding cache access.
    return cachedfunc(LRUCache(maxsize, getsizeof), typed, lock)
Added: packages/branches/upstream/python-cachetools/current/cachetools/rrcache.py
===================================================================
--- packages/branches/upstream/python-cachetools/current/cachetools/rrcache.py (rev 0)
+++ packages/branches/upstream/python-cachetools/current/cachetools/rrcache.py 2014-10-31 18:52:10 UTC (rev 31332)
@@ -0,0 +1,31 @@
+from .cache import Cache
+from .decorators import cachedfunc
+from .lock import RLock
+
+import random
+
+
class RRCache(Cache):
    """Random Replacement (RR) cache implementation.

    This class randomly selects candidate items and discards them to
    make space when necessary.

    """

    def popitem(self):
        """Remove and return a random `(key, value)` pair."""
        keys = list(self)
        if not keys:
            raise KeyError('cache is empty')
        victim = random.choice(keys)
        return (victim, self.pop(victim))
+
+
def rr_cache(maxsize=128, typed=False, getsizeof=None, lock=RLock):
    """Decorator to wrap a function with a memoizing callable that saves
    up to `maxsize` results based on a Random Replacement (RR)
    algorithm.

    """
    # One RRCache shared by all invocations of the decorated function;
    # `lock` is a factory (e.g. threading.RLock) guarding cache access.
    return cachedfunc(RRCache(maxsize, getsizeof), typed, lock)
Added: packages/branches/upstream/python-cachetools/current/cachetools/ttlcache.py
===================================================================
--- packages/branches/upstream/python-cachetools/current/cachetools/ttlcache.py (rev 0)
+++ packages/branches/upstream/python-cachetools/current/cachetools/ttlcache.py 2014-10-31 18:52:10 UTC (rev 31332)
@@ -0,0 +1,131 @@
+from .lrucache import LRUCache
+from .decorators import cachedfunc
+from .link import Link
+from .lock import RLock
+
+import time
+
+_marker = object()
+
+
class TTLCache(LRUCache):
    """Cache implementation with per-item time-to-live (TTL) value.

    This class associates a time-to-live value with each item.  Items
    that expire because they have exceeded their time-to-live will be
    removed.  If no expired items are there to remove, the least
    recently used items will be discarded first to make space when
    necessary.

    By default, the time-to-live is specified in seconds, and the
    standard :func:`time.time` function is used to retrieve the
    current time.  A custom `timer` function can be supplied if
    needed.

    """

    class ExpiredError(KeyError):
        """Raised when a cached item's time-to-live has expired.

        This is a subclass of :exc:`KeyError`.

        """
        pass

    def __init__(self, maxsize, ttl, timer=time.time, getsizeof=None):
        if getsizeof is None:
            LRUCache.__init__(self, maxsize)
        else:
            # Entries are stored as (value, link); adapt the user's
            # size function to look at the value only.
            LRUCache.__init__(self, maxsize, lambda e: getsizeof(e[0]))
            # Shadow the inherited getsizeof method only when a size
            # function was actually supplied; assigning it
            # unconditionally would bind None here and break the
            # public getsizeof() API.
            self.getsizeof = getsizeof
        # Expiration-order list; distinct from the LRU list because of
        # private name mangling (_TTLCache__root vs _LRUCache__root).
        root = Link()
        root.prev = root.next = root
        self.__root = root
        self.__timer = timer
        self.__ttl = ttl

    def __repr__(self, cache_getitem=LRUCache.__getitem__):
        # Use the LRU accessor so printing neither reorders nor raises
        # ExpiredError for stale items.
        return '%s(%r, maxsize=%d, currsize=%d)' % (
            self.__class__.__name__,
            [(key, cache_getitem(self, key)[0]) for key in self],
            self.maxsize,
            self.currsize,
        )

    def __getitem__(self, key, cache_getitem=LRUCache.__getitem__):
        value, link = cache_getitem(self, key)
        # link.data is (key, expiration time).  Expired items are
        # reported but deliberately not deleted here (see the 0.5.0
        # change log entry).
        if link.data[1] < self.__timer():
            raise TTLCache.ExpiredError(key)
        return value

    def __setitem__(self, key, value,
                    cache_getitem=LRUCache.__getitem__,
                    cache_setitem=LRUCache.__setitem__,
                    cache_delitem=LRUCache.__delitem__):
        # `now` (rather than `time`) avoids shadowing the time module.
        now = self.__timer()
        self.expire(now)
        try:
            _, link = cache_getitem(self, key)
        except KeyError:
            link = Link()
        cache_setitem(self, key, (value, link))
        try:
            # Existing link: splice it out of the expiration list.
            link.prev.next = link.next
            link.next.prev = link.prev
        except AttributeError:
            # Fresh link: not yet on the list.
            pass
        # Append at the tail with a fresh expiration time.
        root = self.__root
        link.data = (key, now + self.__ttl)
        link.prev = tail = root.prev
        link.next = root
        tail.next = root.prev = link

    def __delitem__(self, key,
                    cache_getitem=LRUCache.__getitem__,
                    cache_delitem=LRUCache.__delitem__):
        _, link = cache_getitem(self, key)
        cache_delitem(self, key)
        link.unlink()
        self.expire()

    def expire(self, time=None, cache_delitem=LRUCache.__delitem__):
        """Remove expired items from the cache."""
        if time is None:
            time = self.__timer()
        # Links are kept in expiration order, so only the head of the
        # list needs to be examined.
        root = self.__root
        head = root.next
        while head is not root and head.data[1] < time:
            cache_delitem(self, head.data[0])
            head.next.prev = root
            head = root.next = head.next

    def pop(self, key, default=_marker):
        # Bypass TTLCache.__getitem__ so even expired items can still
        # be popped (and their links removed) without ExpiredError.
        try:
            value, link = LRUCache.__getitem__(self, key)
        except KeyError:
            if default is _marker:
                raise
            return default
        LRUCache.__delitem__(self, key)
        link.unlink()
        self.expire()
        return value

    @property
    def timer(self):
        """Return the timer used by the cache."""
        return self.__timer

    @property
    def ttl(self):
        """Return the time-to-live of the cache."""
        return self.__ttl
+
+
def ttl_cache(maxsize=128, ttl=600, timer=time.time, typed=False,
              getsizeof=None, lock=RLock):
    """Decorator to wrap a function with a memoizing callable that saves
    up to `maxsize` results based on a Least Recently Used (LRU)
    algorithm with a per-item time-to-live (TTL) value.

    """
    # One TTLCache shared by all invocations of the decorated function;
    # `lock` is a factory (e.g. threading.RLock) guarding cache access.
    return cachedfunc(TTLCache(maxsize, ttl, timer, getsizeof), typed, lock)
Added: packages/branches/upstream/python-cachetools/current/cachetools.egg-info/PKG-INFO
===================================================================
--- packages/branches/upstream/python-cachetools/current/cachetools.egg-info/PKG-INFO (rev 0)
+++ packages/branches/upstream/python-cachetools/current/cachetools.egg-info/PKG-INFO 2014-10-31 18:52:10 UTC (rev 31332)
@@ -0,0 +1,111 @@
+Metadata-Version: 1.1
+Name: cachetools
+Version: 0.6.0
+Summary: Extensible memoizing collections and decorators
+Home-page: https://github.com/tkem/cachetools
+Author: Thomas Kemmer
+Author-email: tkemmer@computer.org
+License: MIT
+Description: cachetools
+ ========================================================================
+
+ This module provides various memoizing collections and decorators,
+ including a variant of the Python 3 Standard Library
+ ``functools.lru_cache`` function decorator.
+
+ .. code-block:: pycon
+
+ >>> from cachetools import LRUCache
+ >>> cache = LRUCache(maxsize=2)
+ >>> cache.update([('first', 1), ('second', 2)])
+ >>> cache
+ LRUCache([('second', 2), ('first', 1)], maxsize=2, currsize=2)
+ >>> cache['third'] = 3
+ >>> cache
+ LRUCache([('second', 2), ('third', 3)], maxsize=2, currsize=2)
+ >>> cache['second']
+ 2
+ >>> cache['fourth'] = 4
+ >>> cache
+ LRUCache([('second', 2), ('fourth', 4)], maxsize=2, currsize=2)
+
+
+ For the purpose of this module, a *cache* is a mutable_ mapping_ of a
+ fixed maximum size. When the cache is full, i.e. the size of the
+ cache would exceed its maximum size, the cache must choose which
+ item(s) to discard based on a suitable `cache algorithm`_. A cache's
+ size is the sum of the size of its items, and an item's size in
+ general is a property or function of its value, e.g. the result of
+ ``sys.getsizeof``, or ``len`` for string and sequence values.
+
+ This module provides various cache implementations based on different
+ cache algorithms, as well as decorators for easily memoizing function
+ and method calls.
+
+
+ Installation
+ ------------------------------------------------------------------------
+
+ Install cachetools using pip::
+
+ pip install cachetools
+
+
+ Project Resources
+ ------------------------------------------------------------------------
+
+ .. image:: http://img.shields.io/pypi/v/cachetools.svg?style=flat
+ :target: https://pypi.python.org/pypi/cachetools/
+ :alt: Latest PyPI version
+
+ .. image:: http://img.shields.io/pypi/dm/cachetools.svg?style=flat
+ :target: https://pypi.python.org/pypi/cachetools/
+ :alt: Number of PyPI downloads
+
+ .. image:: http://img.shields.io/travis/tkem/cachetools.svg?style=flat
+ :target: https://travis-ci.org/tkem/cachetools/
+ :alt: Travis CI build status
+
+ .. image:: http://img.shields.io/coveralls/tkem/cachetools.svg?style=flat
+ :target: https://coveralls.io/r/tkem/cachetools
+ :alt: Test coverage
+
+ - `Documentation`_
+ - `Issue Tracker`_
+ - `Source Code`_
+ - `Change Log`_
+
+
+ License
+ ------------------------------------------------------------------------
+
+ Copyright (c) 2014 Thomas Kemmer.
+
+ Licensed under the `MIT License`_.
+
+
+ .. _functools.lru_cache: http://docs.python.org/3.4/library/functools.html#functools.lru_cache
+ .. _mutable: http://docs.python.org/dev/glossary.html#term-mutable
+ .. _mapping: http://docs.python.org/dev/glossary.html#term-mapping
+ .. _cache algorithm: http://en.wikipedia.org/wiki/Cache_algorithms
+
+ .. _Documentation: http://pythonhosted.org/cachetools/
+ .. _Issue Tracker: https://github.com/tkem/cachetools/issues/
+ .. _Source Code: https://github.com/tkem/cachetools/
+ .. _Change Log: http://raw.github.com/tkem/cachetools/master/Changes
+ .. _MIT License: http://raw.github.com/tkem/cachetools/master/LICENSE
+
+Keywords: cache caching LRU LFU TTL
+Platform: UNKNOWN
+Classifier: Development Status :: 4 - Beta
+Classifier: Environment :: Other Environment
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.2
+Classifier: Programming Language :: Python :: 3.3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
Added: packages/branches/upstream/python-cachetools/current/cachetools.egg-info/SOURCES.txt
===================================================================
--- packages/branches/upstream/python-cachetools/current/cachetools.egg-info/SOURCES.txt (rev 0)
+++ packages/branches/upstream/python-cachetools/current/cachetools.egg-info/SOURCES.txt 2014-10-31 18:52:10 UTC (rev 31332)
@@ -0,0 +1,26 @@
+Changes
+LICENSE
+MANIFEST.in
+README.rst
+setup.cfg
+setup.py
+cachetools/__init__.py
+cachetools/cache.py
+cachetools/decorators.py
+cachetools/lfucache.py
+cachetools/link.py
+cachetools/lock.py
+cachetools/lrucache.py
+cachetools/rrcache.py
+cachetools/ttlcache.py
+cachetools.egg-info/PKG-INFO
+cachetools.egg-info/SOURCES.txt
+cachetools.egg-info/dependency_links.txt
+cachetools.egg-info/top_level.txt
+tests/__init__.py
+tests/test_cache.py
+tests/test_cachedmethod.py
+tests/test_lfucache.py
+tests/test_lrucache.py
+tests/test_rrcache.py
+tests/test_ttlcache.py
\ No newline at end of file
Added: packages/branches/upstream/python-cachetools/current/cachetools.egg-info/dependency_links.txt
===================================================================
--- packages/branches/upstream/python-cachetools/current/cachetools.egg-info/dependency_links.txt (rev 0)
+++ packages/branches/upstream/python-cachetools/current/cachetools.egg-info/dependency_links.txt 2014-10-31 18:52:10 UTC (rev 31332)
@@ -0,0 +1 @@
+
Added: packages/branches/upstream/python-cachetools/current/cachetools.egg-info/top_level.txt
===================================================================
--- packages/branches/upstream/python-cachetools/current/cachetools.egg-info/top_level.txt (rev 0)
+++ packages/branches/upstream/python-cachetools/current/cachetools.egg-info/top_level.txt 2014-10-31 18:52:10 UTC (rev 31332)
@@ -0,0 +1 @@
+cachetools
Added: packages/branches/upstream/python-cachetools/current/setup.cfg
===================================================================
--- packages/branches/upstream/python-cachetools/current/setup.cfg (rev 0)
+++ packages/branches/upstream/python-cachetools/current/setup.cfg 2014-10-31 18:52:10 UTC (rev 31332)
@@ -0,0 +1,16 @@
+[flake8]
+exclude = .git,docs
+
+[build_sphinx]
+source-dir = docs/
+build-dir = docs/_build
+all_files = 1
+
+[upload_sphinx]
+upload-dir = docs/_build/html
+
+[egg_info]
+tag_build =
+tag_date = 0
+tag_svn_revision = 0
+
Added: packages/branches/upstream/python-cachetools/current/setup.py
===================================================================
--- packages/branches/upstream/python-cachetools/current/setup.py (rev 0)
+++ packages/branches/upstream/python-cachetools/current/setup.py 2014-10-31 18:52:10 UTC (rev 31332)
@@ -0,0 +1,37 @@
+from setuptools import setup
+
+
def get_version(filename):
    """Extract the package's ``__version__`` string from *filename*.

    Scans the file for ``__name__ = 'value'`` style metadata assignments
    and returns the value bound to ``__version__``.  Raises KeyError if
    no such assignment is found.
    """
    import re
    # Use a context manager so the file handle is closed promptly
    # (the original left it to the garbage collector).
    with open(filename) as f:
        content = f.read()
    metadata = dict(re.findall("__([a-z]+)__ = '([^']+)'", content))
    return metadata['version']
+
# Package metadata; the version string is read out of
# cachetools/__init__.py so it is defined in exactly one place.
setup(
    name='cachetools',
    version=get_version('cachetools/__init__.py'),
    author='Thomas Kemmer',
    author_email='tkemmer at computer.org',
    url='https://github.com/tkem/cachetools',
    license='MIT',
    description='Extensible memoizing collections and decorators',  # noqa
    long_description=open('README.rst').read(),
    keywords='cache caching LRU LFU TTL',
    classifiers=[
        'Development Status :: 4 - Beta',
        'Environment :: Other Environment',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.2',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Topic :: Software Development :: Libraries :: Python Modules'
    ],
    packages=['cachetools'],
    test_suite='tests'
)
Added: packages/branches/upstream/python-cachetools/current/tests/__init__.py
===================================================================
--- packages/branches/upstream/python-cachetools/current/tests/__init__.py (rev 0)
+++ packages/branches/upstream/python-cachetools/current/tests/__init__.py 2014-10-31 18:52:10 UTC (rev 31332)
@@ -0,0 +1,188 @@
class CacheTestMixin(object):
    """Shared test cases exercised against every cache implementation.

    Concrete ``unittest.TestCase`` subclasses mix this in and implement
    ``make_cache`` to return the cache class under test.
    """

    def make_cache(self, maxsize, getsizeof=None):
        # Implemented by each concrete test case.
        raise NotImplementedError

    def test_defaults(self):
        cache = self.make_cache(maxsize=1)
        self.assertEqual(0, len(cache))
        self.assertEqual(1, cache.maxsize)
        self.assertEqual(0, cache.currsize)
        # Default getsizeof treats every value as size 1.
        self.assertEqual(1, cache.getsizeof(None))
        self.assertEqual(1, cache.getsizeof(''))
        self.assertEqual(1, cache.getsizeof(0))

    def test_insert(self):
        cache = self.make_cache(maxsize=2)

        cache.update({1: 1, 2: 2})
        self.assertEqual(2, len(cache))
        self.assertEqual(1, cache[1])
        self.assertEqual(2, cache[2])

        # Inserting beyond maxsize must evict exactly one existing item;
        # which one depends on the cache algorithm under test.
        cache[3] = 3
        self.assertEqual(2, len(cache))
        self.assertEqual(3, cache[3])
        self.assertTrue(1 in cache or 2 in cache)

        cache[4] = 4
        self.assertEqual(2, len(cache))
        self.assertEqual(4, cache[4])
        self.assertTrue(1 in cache or 2 in cache or 3 in cache)

    def test_update(self):
        cache = self.make_cache(maxsize=2)

        cache.update({1: 1, 2: 2})
        self.assertEqual(2, len(cache))
        self.assertEqual(1, cache[1])
        self.assertEqual(2, cache[2])

        # Updating with identical items must not evict anything.
        cache.update({1: 1, 2: 2})
        self.assertEqual(2, len(cache))
        self.assertEqual(1, cache[1])
        self.assertEqual(2, cache[2])

        # Updating existing keys replaces their values in place.
        cache.update({1: 'a', 2: 'b'})
        self.assertEqual(2, len(cache))
        self.assertEqual('a', cache[1])
        self.assertEqual('b', cache[2])

    def test_delete(self):
        cache = self.make_cache(maxsize=2)

        cache.update({1: 1, 2: 2})
        self.assertEqual(2, len(cache))
        self.assertEqual(1, cache[1])
        self.assertEqual(2, cache[2])

        del cache[2]
        self.assertEqual(1, len(cache))
        self.assertEqual(1, cache[1])
        self.assertNotIn(2, cache)

        del cache[1]
        self.assertEqual(0, len(cache))
        self.assertNotIn(1, cache)
        self.assertNotIn(2, cache)

    def test_pop(self):
        cache = self.make_cache(maxsize=2)

        cache.update({1: 1, 2: 2})
        self.assertEqual(2, cache.pop(2))
        self.assertEqual(1, len(cache))
        self.assertEqual(1, cache.pop(1))
        self.assertEqual(0, len(cache))

        with self.assertRaises(KeyError):
            cache.pop(2)
        with self.assertRaises(KeyError):
            cache.pop(1)
        with self.assertRaises(KeyError):
            cache.pop(0)

        self.assertEqual(None, cache.pop(2, None))
        self.assertEqual(None, cache.pop(1, None))
        self.assertEqual(None, cache.pop(0, None))

    def test_popitem(self):
        cache = self.make_cache(maxsize=2)

        cache.update({1: 1, 2: 2})
        # popitem() must return one of the stored (key, value) pairs and
        # shrink the cache.  (The original version mistakenly exercised
        # pop(), so popitem() was only ever tested on an empty cache.)
        self.assertIn(cache.popitem(), {(1, 1), (2, 2)})
        self.assertEqual(1, len(cache))
        self.assertIn(cache.popitem(), {(1, 1), (2, 2)})
        self.assertEqual(0, len(cache))

        with self.assertRaises(KeyError):
            cache.popitem()

    def test_getsizeof(self):
        # With getsizeof=identity an item's size equals its value, so the
        # cache's currsize is the sum of the stored values.
        cache = self.make_cache(maxsize=3, getsizeof=lambda x: x)
        self.assertEqual(3, cache.maxsize)
        self.assertEqual(0, cache.currsize)
        self.assertEqual(1, cache.getsizeof(1))
        self.assertEqual(2, cache.getsizeof(2))
        self.assertEqual(3, cache.getsizeof(3))

        cache.update({1: 1, 2: 2})
        self.assertEqual(2, len(cache))
        self.assertEqual(3, cache.currsize)
        self.assertEqual(1, cache[1])
        self.assertEqual(2, cache[2])

        # Growing an existing item may force other items out.
        cache[1] = 2
        self.assertEqual(1, len(cache))
        self.assertEqual(2, cache.currsize)
        self.assertEqual(2, cache[1])
        self.assertNotIn(2, cache)

        cache.update({1: 1, 2: 2})
        self.assertEqual(2, len(cache))
        self.assertEqual(3, cache.currsize)
        self.assertEqual(1, cache[1])
        self.assertEqual(2, cache[2])

        cache[3] = 3
        self.assertEqual(1, len(cache))
        self.assertEqual(3, cache.currsize)
        self.assertEqual(3, cache[3])
        self.assertNotIn(1, cache)
        self.assertNotIn(2, cache)

        # An item larger than maxsize can never fit and must be rejected
        # without disturbing the current contents.
        with self.assertRaises(ValueError):
            cache[4] = 4
        self.assertEqual(1, len(cache))
        self.assertEqual(3, cache.currsize)
        self.assertEqual(3, cache[3])
+
+
class LRUCacheTestMixin(CacheTestMixin):
    """Generic tests plus LRU-specific eviction-order tests.

    Mixed into the test cases for caches with least-recently-used
    eviction (LRUCache and TTLCache).
    """

    def test_lru_insert(self):
        cache = self.make_cache(maxsize=2)

        cache[1] = 1
        cache[2] = 2
        cache[3] = 3

        # 1 is the least recently used item and must be the one evicted.
        self.assertEqual(len(cache), 2)
        self.assertEqual(cache[2], 2)
        self.assertEqual(cache[3], 3)
        self.assertNotIn(1, cache)

        # Reading 2 refreshes it, so the next insert evicts 3 instead.
        cache[2]
        cache[4] = 4
        self.assertEqual(len(cache), 2)
        self.assertEqual(cache[2], 2)
        self.assertEqual(cache[4], 4)
        self.assertNotIn(3, cache)

        cache[5] = 5
        self.assertEqual(len(cache), 2)
        self.assertEqual(cache[4], 4)
        self.assertEqual(cache[5], 5)
        self.assertNotIn(2, cache)

    def test_lru_getsizeof(self):
        # Sizes equal values, so inserting 3 into a maxsize-3 cache must
        # evict everything else.
        cache = self.make_cache(maxsize=3, getsizeof=lambda x: x)

        cache[1] = 1
        cache[2] = 2

        self.assertEqual(len(cache), 2)
        self.assertEqual(cache[1], 1)
        self.assertEqual(cache[2], 2)

        cache[3] = 3

        self.assertEqual(len(cache), 1)
        self.assertEqual(cache[3], 3)
        self.assertNotIn(1, cache)
        self.assertNotIn(2, cache)

        # An item larger than maxsize is rejected outright.
        with self.assertRaises(ValueError):
            cache[4] = 4
        self.assertEqual(len(cache), 1)
        self.assertEqual(cache[3], 3)
Added: packages/branches/upstream/python-cachetools/current/tests/test_cache.py
===================================================================
--- packages/branches/upstream/python-cachetools/current/tests/test_cache.py (rev 0)
+++ packages/branches/upstream/python-cachetools/current/tests/test_cache.py 2014-10-31 18:52:10 UTC (rev 31332)
@@ -0,0 +1,10 @@
+import unittest
+
+from . import CacheTestMixin
+from cachetools import Cache
+
+
class CacheTest(unittest.TestCase, CacheTestMixin):
    """Run the generic cache tests against the base Cache class."""

    def make_cache(self, maxsize, getsizeof=None):
        return Cache(maxsize, getsizeof)
Added: packages/branches/upstream/python-cachetools/current/tests/test_cachedmethod.py
===================================================================
--- packages/branches/upstream/python-cachetools/current/tests/test_cachedmethod.py (rev 0)
+++ packages/branches/upstream/python-cachetools/current/tests/test_cachedmethod.py 2014-10-31 18:52:10 UTC (rev 31332)
@@ -0,0 +1,62 @@
+import unittest
+import operator
+
+from cachetools import LRUCache, cachedmethod
+
+
class Cached(object):
    """Fixture whose memoized methods count calls.

    A cached call returns the count recorded on the first (uncached)
    call, so cache hits are observable as repeated return values.
    """

    count = 0

    def __init__(self, cache):
        self.cache = cache

    # NOTE(review): the mailing-list archive mangled the decorators'
    # leading '@' into ' at '; restored here.
    @cachedmethod(operator.attrgetter('cache'))
    def get(self, value):
        count = self.count
        self.count += 1
        return count

    @cachedmethod(operator.attrgetter('cache'), typed=True)
    def get_typed(self, value):
        count = self.count
        self.count += 1
        return count
+
+
class CachedMethodTest(unittest.TestCase):
    """Tests for the cachedmethod decorator on instance methods."""

    def test_decorator(self):
        cached = Cached(LRUCache(maxsize=2))
        self.assertEqual(cached.cache, cached.get.cache(cached))

        self.assertEqual(cached.get(0), 0)
        self.assertEqual(cached.get(1), 1)
        self.assertEqual(cached.get(1), 1)
        # Untyped caching: 1 and 1.0 hash equal, so they share an entry.
        self.assertEqual(cached.get(1.0), 1)
        self.assertEqual(cached.get(1.0), 1)

        cached.cache.clear()
        self.assertEqual(cached.get(1), 2)

    def test_typed_decorator(self):
        cached = Cached(LRUCache(maxsize=2))
        self.assertEqual(cached.cache, cached.get_typed.cache(cached))

        self.assertEqual(cached.get_typed(0), 0)
        self.assertEqual(cached.get_typed(1), 1)
        self.assertEqual(cached.get_typed(1), 1)
        # typed=True keys on the argument's type as well, so 1 and 1.0
        # get distinct cache entries.
        self.assertEqual(cached.get_typed(1.0), 2)
        self.assertEqual(cached.get_typed(1.0), 2)
        self.assertEqual(cached.get_typed(0.0), 3)
        self.assertEqual(cached.get_typed(0), 4)

    def test_decorator_nocache(self):
        # With cache=None every call is a miss and the count keeps rising.
        cached = Cached(None)
        self.assertEqual(None, cached.get.cache(cached))

        self.assertEqual(cached.get(0), 0)
        self.assertEqual(cached.get(1), 1)
        self.assertEqual(cached.get(1), 2)
        self.assertEqual(cached.get(1.0), 3)
        self.assertEqual(cached.get(1.0), 4)
Added: packages/branches/upstream/python-cachetools/current/tests/test_lfucache.py
===================================================================
--- packages/branches/upstream/python-cachetools/current/tests/test_lfucache.py (rev 0)
+++ packages/branches/upstream/python-cachetools/current/tests/test_lfucache.py 2014-10-31 18:52:10 UTC (rev 31332)
@@ -0,0 +1,82 @@
+import unittest
+
+from . import CacheTestMixin
+from cachetools import LFUCache, lfu_cache
+
+
# The list archive mangled the decorator's leading '@' into ' at '; restored.
@lfu_cache(maxsize=2)
def cached(n):
    """Identity function memoized with an LFU cache for the decorator tests."""
    return n
+
+
# The list archive mangled the decorator's leading '@' into ' at '; restored.
@lfu_cache(maxsize=2, typed=True, lock=None)
def cached_typed(n):
    """Identity function memoized with a typed, lock-free LFU cache."""
    return n
+
+
class LFUCacheTest(unittest.TestCase, CacheTestMixin):
    """Generic cache tests plus LFU-specific eviction and decorator tests."""

    def make_cache(self, maxsize, getsizeof=None):
        return LFUCache(maxsize, getsizeof)

    def test_lfu_insert(self):
        cache = self.make_cache(maxsize=2)

        cache[1] = 1
        # Access 1 once so it has a higher use count than 2 and 3.
        cache[1]
        cache[2] = 2
        cache[3] = 3

        # One of the never-read items (2 or 3) must have been evicted,
        # but never the frequently used 1.
        self.assertEqual(len(cache), 2)
        self.assertEqual(cache[1], 1)
        self.assertTrue(2 in cache or 3 in cache)
        self.assertTrue(2 not in cache or 3 not in cache)

        cache[4] = 4
        self.assertEqual(len(cache), 2)
        self.assertEqual(cache[4], 4)
        self.assertEqual(cache[1], 1)

    def test_lfu_getsizeof(self):
        cache = self.make_cache(maxsize=3, getsizeof=lambda x: x)

        cache[1] = 1
        cache[2] = 2

        self.assertEqual(len(cache), 2)
        self.assertEqual(cache[1], 1)
        self.assertEqual(cache[2], 2)

        # Item of size 3 fills the cache entirely, evicting both others.
        cache[3] = 3

        self.assertEqual(len(cache), 1)
        self.assertEqual(cache[3], 3)
        self.assertNotIn(1, cache)
        self.assertNotIn(2, cache)

        with self.assertRaises(ValueError):
            cache[4] = 4
        self.assertEqual(len(cache), 1)
        self.assertEqual(cache[3], 3)

    def test_decorator(self):
        # cache_info() is (hits, misses, maxsize, currsize).
        self.assertEqual(cached(1), 1)
        self.assertEqual(cached.cache_info(), (0, 1, 2, 1))
        self.assertEqual(cached(1), 1)
        self.assertEqual(cached.cache_info(), (1, 1, 2, 1))
        self.assertEqual(cached(1.0), 1.0)
        self.assertEqual(cached.cache_info(), (2, 1, 2, 1))

        # cache_clear() empties the cache but keeps hit/miss statistics.
        cached.cache_clear()
        self.assertEqual(cached(1), 1)
        self.assertEqual(cached.cache_info(), (2, 2, 2, 1))

    def test_typed_decorator(self):
        self.assertEqual(cached_typed(1), 1)
        self.assertEqual(cached_typed.cache_info(), (0, 1, 2, 1))
        self.assertEqual(cached_typed(1), 1)
        self.assertEqual(cached_typed.cache_info(), (1, 1, 2, 1))
        # typed=True: 1 and 1.0 occupy separate entries.
        self.assertEqual(cached_typed(1.0), 1.0)
        self.assertEqual(cached_typed.cache_info(), (1, 2, 2, 2))
        self.assertEqual(cached_typed(1.0), 1.0)
        self.assertEqual(cached_typed.cache_info(), (2, 2, 2, 2))
Added: packages/branches/upstream/python-cachetools/current/tests/test_lrucache.py
===================================================================
--- packages/branches/upstream/python-cachetools/current/tests/test_lrucache.py (rev 0)
+++ packages/branches/upstream/python-cachetools/current/tests/test_lrucache.py 2014-10-31 18:52:10 UTC (rev 31332)
@@ -0,0 +1,43 @@
+import unittest
+
+from . import LRUCacheTestMixin
+from cachetools import LRUCache, lru_cache
+
+
# The list archive mangled the decorator's leading '@' into ' at '; restored.
@lru_cache(maxsize=2)
def cached(n):
    """Identity function memoized with an LRU cache for the decorator tests."""
    return n
+
+
# The list archive mangled the decorator's leading '@' into ' at '; restored.
@lru_cache(maxsize=2, typed=True, lock=None)
def cached_typed(n):
    """Identity function memoized with a typed, lock-free LRU cache."""
    return n
+
+
class LRUCacheTest(unittest.TestCase, LRUCacheTestMixin):
    """LRU mixin tests plus lru_cache decorator tests."""

    def make_cache(self, maxsize, getsizeof=None):
        return LRUCache(maxsize, getsizeof)

    def test_decorator(self):
        # cache_info() is (hits, misses, maxsize, currsize).
        self.assertEqual(cached.cache_info(), (0, 0, 2, 0))
        self.assertEqual(cached(1), 1)
        self.assertEqual(cached.cache_info(), (0, 1, 2, 1))
        self.assertEqual(cached(1), 1)
        self.assertEqual(cached.cache_info(), (1, 1, 2, 1))
        # Untyped: 1.0 hashes equal to 1 and therefore hits.
        self.assertEqual(cached(1.0), 1.0)
        self.assertEqual(cached.cache_info(), (2, 1, 2, 1))

        # cache_clear() empties the cache but keeps hit/miss statistics.
        cached.cache_clear()
        self.assertEqual(cached(1), 1)
        self.assertEqual(cached.cache_info(), (2, 2, 2, 1))

    def test_typed_decorator(self):
        self.assertEqual(cached_typed(1), 1)
        self.assertEqual(cached_typed.cache_info(), (0, 1, 2, 1))
        self.assertEqual(cached_typed(1), 1)
        self.assertEqual(cached_typed.cache_info(), (1, 1, 2, 1))
        # typed=True: 1 and 1.0 occupy separate entries.
        self.assertEqual(cached_typed(1.0), 1.0)
        self.assertEqual(cached_typed.cache_info(), (1, 2, 2, 2))
        self.assertEqual(cached_typed(1.0), 1.0)
        self.assertEqual(cached_typed.cache_info(), (2, 2, 2, 2))
Added: packages/branches/upstream/python-cachetools/current/tests/test_rrcache.py
===================================================================
--- packages/branches/upstream/python-cachetools/current/tests/test_rrcache.py (rev 0)
+++ packages/branches/upstream/python-cachetools/current/tests/test_rrcache.py 2014-10-31 18:52:10 UTC (rev 31332)
@@ -0,0 +1,42 @@
+import unittest
+
+from . import CacheTestMixin
+from cachetools import RRCache, rr_cache
+
+
# The list archive mangled the decorator's leading '@' into ' at '; restored.
@rr_cache(maxsize=2)
def cached(n):
    """Identity function memoized with an RR cache for the decorator tests."""
    return n
+
+
# The list archive mangled the decorator's leading '@' into ' at '; restored.
@rr_cache(maxsize=2, typed=True, lock=None)
def cached_typed(n):
    """Identity function memoized with a typed, lock-free RR cache."""
    return n
+
+
class RRCacheTest(unittest.TestCase, CacheTestMixin):
    """Generic cache tests plus rr_cache decorator tests."""

    def make_cache(self, maxsize, getsizeof=None):
        return RRCache(maxsize, getsizeof)

    def test_decorator(self):
        # cache_info() is (hits, misses, maxsize, currsize).
        self.assertEqual(cached(1), 1)
        self.assertEqual(cached.cache_info(), (0, 1, 2, 1))
        self.assertEqual(cached(1), 1)
        self.assertEqual(cached.cache_info(), (1, 1, 2, 1))
        self.assertEqual(cached(1.0), 1.0)
        self.assertEqual(cached.cache_info(), (2, 1, 2, 1))

        # cache_clear() empties the cache but keeps hit/miss statistics.
        cached.cache_clear()
        self.assertEqual(cached(1), 1)
        self.assertEqual(cached.cache_info(), (2, 2, 2, 1))

    def test_typed_decorator(self):
        self.assertEqual(cached_typed(1), 1)
        self.assertEqual(cached_typed.cache_info(), (0, 1, 2, 1))
        self.assertEqual(cached_typed(1), 1)
        self.assertEqual(cached_typed.cache_info(), (1, 1, 2, 1))
        # typed=True: 1 and 1.0 occupy separate entries.
        self.assertEqual(cached_typed(1.0), 1.0)
        self.assertEqual(cached_typed.cache_info(), (1, 2, 2, 2))
        self.assertEqual(cached_typed(1.0), 1.0)
        self.assertEqual(cached_typed.cache_info(), (2, 2, 2, 2))
Added: packages/branches/upstream/python-cachetools/current/tests/test_ttlcache.py
===================================================================
--- packages/branches/upstream/python-cachetools/current/tests/test_ttlcache.py (rev 0)
+++ packages/branches/upstream/python-cachetools/current/tests/test_ttlcache.py 2014-10-31 18:52:10 UTC (rev 31332)
@@ -0,0 +1,125 @@
+import unittest
+
+from . import LRUCacheTestMixin
+from cachetools import TTLCache, ttl_cache
+
+
# The list archive mangled the decorator's leading '@' into ' at '; restored.
@ttl_cache(maxsize=2)
def cached(n):
    """Identity function memoized with a TTL cache for the decorator tests."""
    return n
+
+
# The list archive mangled the decorator's leading '@' into ' at '; restored.
@ttl_cache(maxsize=2, typed=True, lock=None)
def cached_typed(n):
    """Identity function memoized with a typed, lock-free TTL cache."""
    return n
+
+
class TTLCacheTest(unittest.TestCase, LRUCacheTestMixin):
    """LRU mixin tests plus TTL-specific expiry tests."""

    def make_cache(self, maxsize, getsizeof=None):
        # timer is frozen at 0 with ttl=0, so nothing ever expires in the
        # generic mixin tests and the cache behaves like a plain LRUCache.
        return TTLCache(maxsize, ttl=0, timer=lambda: 0, getsizeof=getsizeof)

    def make_ttl_cache(self, maxsize, ttl):
        # Deterministic manual clock: time advances only via inc().
        class Timer:
            def __init__(self):
                self.__time = 0

            def __call__(self):
                return self.__time

            def inc(self):
                self.__time = self.__time + 1

        return TTLCache(maxsize, ttl, timer=Timer())

    def test_ttl_insert(self):
        cache = self.make_ttl_cache(maxsize=2, ttl=2)
        self.assertEqual(cache.ttl, 2)

        cache[1] = 1

        self.assertEqual(1, len(cache))
        self.assertEqual(1, cache[1])

        cache.timer.inc()
        cache[2] = 2

        # ttl=2 and only one tick elapsed, so item 1 is still alive.
        self.assertEqual(2, len(cache))
        self.assertEqual(1, cache[1])
        self.assertEqual(2, cache[2])

        cache.timer.inc()
        # Touch 1 so LRU eviction picks 2 when 3 is inserted.
        cache[1]
        cache[3] = 3

        self.assertEqual(2, len(cache))
        self.assertEqual(1, cache[1])
        self.assertNotIn(2, cache)
        self.assertEqual(3, cache[3])

    def test_ttl_expire(self):
        cache = self.make_ttl_cache(maxsize=3, ttl=0)
        self.assertEqual(cache.ttl, 0)

        # With ttl=0 an item expires as soon as the clock advances.
        cache[1] = 1
        self.assertEqual(1, cache[1])
        cache.timer.inc()
        with self.assertRaises(TTLCache.ExpiredError):
            cache[1]
        cache[2] = 2
        self.assertEqual(2, cache[2])
        cache.timer.inc()
        with self.assertRaises(TTLCache.ExpiredError):
            cache[2]
        cache[3] = 3
        self.assertEqual(3, cache[3])

        # expire(t) only evicts items whose expiry time is before t.
        cache.expire(1)
        self.assertNotIn(1, cache)
        self.assertEqual(3, cache[3])

        cache.expire(2)
        self.assertNotIn(1, cache)
        self.assertNotIn(2, cache)
        self.assertEqual(3, cache[3])

        # expire() with no argument uses the cache's own timer.
        cache.timer.inc()
        cache.expire()
        self.assertEqual(0, len(cache))
        self.assertNotIn(1, cache)
        self.assertNotIn(2, cache)
        self.assertNotIn(3, cache)

    def test_ttl_tuple_key(self):
        # Non-scalar (tuple) keys must work through expiry and eviction.
        cache = self.make_ttl_cache(maxsize=1, ttl=0)

        cache[(1, 2, 3)] = 42
        self.assertEqual(42, cache[(1, 2, 3)])
        cache.timer.inc()
        with self.assertRaises(TTLCache.ExpiredError):
            cache[(1, 2, 3)]
        cache.expire()
        self.assertNotIn((1, 2, 3), cache)

    def test_decorator(self):
        # cache_info() is (hits, misses, maxsize, currsize).
        self.assertEqual(cached.cache_info(), (0, 0, 2, 0))
        self.assertEqual(cached(1), 1)
        self.assertEqual(cached.cache_info(), (0, 1, 2, 1))
        self.assertEqual(cached(1), 1)
        self.assertEqual(cached.cache_info(), (1, 1, 2, 1))
        self.assertEqual(cached(1.0), 1.0)
        self.assertEqual(cached.cache_info(), (2, 1, 2, 1))

        # cache_clear() empties the cache but keeps hit/miss statistics.
        cached.cache_clear()
        self.assertEqual(cached(1), 1)
        self.assertEqual(cached.cache_info(), (2, 2, 2, 1))

    def test_typed_decorator(self):
        self.assertEqual(cached_typed(1), 1)
        self.assertEqual(cached_typed.cache_info(), (0, 1, 2, 1))
        self.assertEqual(cached_typed(1), 1)
        self.assertEqual(cached_typed.cache_info(), (1, 1, 2, 1))
        # typed=True: 1 and 1.0 occupy separate entries.
        self.assertEqual(cached_typed(1.0), 1.0)
        self.assertEqual(cached_typed.cache_info(), (1, 2, 2, 2))
        self.assertEqual(cached_typed(1.0), 1.0)
        self.assertEqual(cached_typed.cache_info(), (2, 2, 2, 2))
More information about the Python-modules-commits
mailing list