aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorThomas Kemmer <tkemmer@computer.org>2014-03-22 12:57:39 +0100
committerThomas Kemmer <tkemmer@computer.org>2014-03-22 12:57:39 +0100
commita34b1fa18a3494b6501dc5eaf8114e507fe88cc9 (patch)
tree77346c3a026293e3ea37d8df2d7240b39e24e78f
parentc2e0e6be6252ac1fe568e9107a613c903f37f579 (diff)
downloadcachetools-a34b1fa18a3494b6501dc5eaf8114e507fe88cc9.tar.gz
Add initial implementation, unit tests
-rw-r--r--README.rst45
-rw-r--r--cachetools.py141
-rw-r--r--docs/index.rst20
-rw-r--r--setup.py2
-rw-r--r--tests/test_lfucache.py31
-rw-r--r--tests/test_lrucache.py37
6 files changed, 245 insertions, 31 deletions
diff --git a/README.rst b/README.rst
index df10ac4..afe3375 100644
--- a/README.rst
+++ b/README.rst
@@ -1,11 +1,26 @@
cachetools
========================================================================
-This module...
+This module provides various memoizing collections and function
+decorators, including a variant of the Python 3 functools.lru_cache_
+decorator.
+
+.. note::
+
+ This module is in early pre-alpha, and not fit for *any* purpose
+ (yet).
.. code-block:: pycon
- >>> from cachetools import LRUCache, LFUCache
+ >>> from cachetools import LRUCache
+ >>> cache = LRUCache(maxsize=16)
+ >>> cache['test'] = 1
+ >>> cache.info()
+ CacheInfo(hits=0, misses=0, maxsize=16, currsize=1)
+ >>> cache['test']
+ 1
+ >>> cache.info()
+ CacheInfo(hits=1, misses=0, maxsize=16, currsize=1)
Installation
@@ -19,11 +34,10 @@ Install cachetools using pip::
Project Resources
------------------------------------------------------------------------
-- `Documentation <http://pythonhosted.org/cachetools/>`_
-- `Issue Tracker <https://github.com/tkem/cachetools/issues>`_
-- `Source Code <https://github.com/tkem/cachetools>`_
-- `Change Log <http://raw.github.com/tkem/cachetools/master/Changes>`_
-
+- `Documentation`_
+- `Issue Tracker`_
+- `Source Code`_
+- `Change Log`_
.. image:: https://pypip.in/v/cachetools/badge.png
:target: https://pypi.python.org/pypi/cachetools/
@@ -32,3 +46,20 @@ Project Resources
.. image:: https://pypip.in/d/cachetools/badge.png
:target: https://pypi.python.org/pypi/cachetools/
:alt: Number of PyPI downloads
+
+
+License
+------------------------------------------------------------------------
+
+cachetools is Copyright 2014 Thomas Kemmer.
+
+Licensed under the `MIT License`_.
+
+
+.. _functools.lru_cache: http://docs.python.org/3.4/library/functools.html#functools.lru_cache
+
+.. _Documentation: http://pythonhosted.org/cachetools/
+.. _Source Code: https://github.com/tkem/cachetools/
+.. _Issue Tracker: https://github.com/tkem/cachetools/issues/
+.. _Change Log: https://raw.github.com/tkem/cachetools/master/Changes
+.. _MIT License: http://opensource.org/licenses/MIT
diff --git a/cachetools.py b/cachetools.py
index d21487b..72229a4 100644
--- a/cachetools.py
+++ b/cachetools.py
@@ -1,54 +1,153 @@
"""Python 2.7 memoizing collections and decorators"""
import collections
+import functools
+import threading
-__version__ = '0.0.1'
+__version__ = '0.0.0'
+
+
+CacheInfo = collections.namedtuple('CacheInfo', 'hits misses maxsize currsize')
class LRUCache(collections.MutableMapping):
- def __init__(self, maxsize):
- pass
+ def __init__(self, maxsize, lock=threading.RLock):
+ self.__maxsize = maxsize
+ self.__lock = lock()
+ self.__cache = collections.OrderedDict()
+ self.__hits = 0
+ self.__misses = 0
def __getitem__(self, key):
- pass
+ with self.__lock:
+ try:
+ value = self.__cache[key]
+ except KeyError:
+ self.__misses += 1
+ raise
+ self.__hits += 1
+ self._update(key, value)
+ return value
def __setitem__(self, key, value):
- pass
+ with self.__lock:
+ if len(self.__cache) >= self.__maxsize:
+ # FIXME: popitem
+ del self.__cache[next(iter(self.__cache))]
+ self.__cache[key] = value
+ self._update(key, value)
def __delitem__(self, key):
- pass
+ with self.__lock:
+ del self.__cache[key]
def __iter__(self):
- pass
+ return iter(self.__cache)
+
+ def __len__(self):
+ return len(self.__cache)
- def __len(self):
- pass
+ def _update(self, key, value):
+ del self.__cache[key]
+ self.__cache[key] = value
+
+ def info(self):
+ return CacheInfo(self.__hits, self.__misses, self.__maxsize, len(self))
class LFUCache(collections.MutableMapping):
- def __init__(self, maxsize):
- pass
+ def __init__(self, maxsize, lock=threading.RLock):
+ self.__maxsize = maxsize
+ self.__lock = lock()
+ self.__cache = {}
+ self.__count = collections.Counter()
+ self.__hits = 0
+ self.__misses = 0
def __getitem__(self, key):
- pass
+ with self.__lock:
+ if key not in self.__cache: self.__misses += 1; raise KeyError(key)
+ self.__hits += 1; self.__count[key] += 1
+ return self.__cache[key]
def __setitem__(self, key, value):
- pass
+ with self.__lock:
+ if len(self.__cache) >= self.__maxsize:
+ lfu, _ = self.__count.most_common()[-1]
+ del self.__cache[lfu]
+ del self.__count[lfu]
+ self.__cache[key] = value
+ self.__count[key] = 0
def __delitem__(self, key):
- pass
+ del self.__cache[key]
+ del self.__count[key]
def __iter__(self):
- pass
+ return iter(self.__cache)
+
+ def __len__(self):
+ return len(self.__cache)
+
+ def info(self):
+ return CacheInfo(self.__hits, self.__misses, self.__maxsize, len(self))
+
+
+def makekey(args, kwargs, typed=False):
+ # TODO: support typed argument keys
+ return (tuple(sorted(kwargs.items()))) + args
+
+
+def lru_cache(maxsize=128, typed=False, key=makekey):
+ def decorator(func):
+ cache = LRUCache(maxsize)
+
+ @functools.wraps(func)
+ def wrapper(*args, **kwargs):
+ key = makekey(args, kwargs, typed)
+ try:
+ return cache[key]
+ except KeyError:
+ result = func(*args, **kwargs)
+ cache[key] = result
+ return result
+
+ def cache_info():
+ return cache.info()
+
+ def cache_clear():
+ cache.clear()
+
+ wrapper.cache_info = cache_info
+ wrapper.cache_clear = cache_clear
+ return wrapper
+
+ return decorator
+
+
+def lfu_cache(maxsize=128, typed=False, key=makekey):
+ def decorator(func):
+ cache = LFUCache(maxsize)
- def __len(self):
- pass
+ @functools.wraps(func)
+ def wrapper(*args, **kwargs):
+ key = makekey(args, kwargs, typed)
+ try:
+ return cache[key]
+ except KeyError:
+ result = func(*args, **kwargs)
+ cache[key] = result
+ return result
+ def cache_info():
+ return cache.info()
-def lru_cache(maxsize=128, typed=False):
- pass
+ def cache_clear():
+ cache.clear()
+ wrapper.cache_info = cache_info
+ wrapper.cache_clear = cache_clear
+ return wrapper
-def lfu_cache(maxsize=128, typed=False):
- pass
+ return decorator
diff --git a/docs/index.rst b/docs/index.rst
index 1cc052f..38c0c0d 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -3,12 +3,26 @@
.. module:: cachetools
-This module...
+This module provides various memoizing collections and function
+decorators, including a variant of the Python 3 functools.lru_cache_
+decorator.
+.. note::
+
+ This module is in early pre-alpha, and not fit for *any* purpose
+ (yet).
.. code-block:: pycon
- >>> from cachetools import LRUCache, LFUCache
+ >>> from cachetools import LRUCache
+ >>> cache = LRUCache(maxsize=16)
+ >>> cache['test'] = 1
+ >>> cache.info()
+ CacheInfo(hits=0, misses=0, maxsize=16, currsize=1)
+ >>> cache['test']
+ 1
+ >>> cache.info()
+ CacheInfo(hits=1, misses=0, maxsize=16, currsize=1)
.. autoclass:: LRUCache
@@ -21,3 +35,5 @@ This module...
.. autofunction:: lru_cache
.. autofunction:: lfu_cache
+
+.. _functools.lru_cache: http://docs.python.org/3.4/library/functools.html#functools.lru_cache
diff --git a/setup.py b/setup.py
index d7bae66..e165d15 100644
--- a/setup.py
+++ b/setup.py
@@ -18,7 +18,7 @@ setup(
long_description=open('README.rst').read(),
keywords='cache caching lru lfu ttl',
classifiers=[
- 'Development Status :: 3 - Alpha',
+ 'Development Status :: 2 - Pre-Alpha',
'Environment :: Other Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
diff --git a/tests/test_lfucache.py b/tests/test_lfucache.py
new file mode 100644
index 0000000..f9e4e01
--- /dev/null
+++ b/tests/test_lfucache.py
@@ -0,0 +1,31 @@
+import unittest
+
+from cachetools import LFUCache, lfu_cache
+
+
+@lfu_cache(maxsize=2)
+def cached(n):
+ return n
+
+
+class LFUCacheTest(unittest.TestCase):
+
+ def test_insert(self):
+ cache = LFUCache(maxsize=2)
+ cache['a'] = 0
+ cache['a'] = 1
+ cache['b'] = 2
+ cache['c'] = 3
+
+ self.assertEqual(cache['a'], 1)
+ self.assertTrue('b' in cache or 'c' in cache)
+ self.assertTrue('b' not in cache or 'c' not in cache)
+
+ cache['a'] = 4
+ self.assertEqual(cache['a'], 4)
+
+ def test_decorator(self):
+ self.assertEqual(cached(1), 1)
+ self.assertItemsEqual(cached.cache_info(), [0, 1, 2, 1])
+ self.assertEqual(cached(1), 1)
+ self.assertItemsEqual(cached.cache_info(), [1, 1, 2, 1])
diff --git a/tests/test_lrucache.py b/tests/test_lrucache.py
new file mode 100644
index 0000000..d5eea5d
--- /dev/null
+++ b/tests/test_lrucache.py
@@ -0,0 +1,37 @@
+import unittest
+
+from cachetools import LRUCache, lru_cache
+
+
+@lru_cache(maxsize=2)
+def cached(n):
+ return n
+
+
+class LRUCacheTest(unittest.TestCase):
+
+ def test_insert(self):
+ cache = LRUCache(maxsize=2)
+ cache['a'] = 1
+ cache['b'] = 2
+ cache['c'] = 3
+
+ self.assertEqual(cache['b'], 2)
+ self.assertEqual(cache['c'], 3)
+ self.assertNotIn('a', cache)
+
+ cache['a'] = 4
+ self.assertEqual(cache['a'], 4)
+ self.assertEqual(cache['c'], 3)
+ self.assertNotIn('b', cache)
+
+ cache['b'] = 5
+ self.assertEqual(cache['b'], 5)
+ self.assertEqual(cache['c'], 3)
+ self.assertNotIn('a', cache)
+
+ def test_decorator(self):
+ self.assertEqual(cached(1), 1)
+ self.assertItemsEqual(cached.cache_info(), [0, 1, 2, 1])
+ self.assertEqual(cached(1), 1)
+ self.assertItemsEqual(cached.cache_info(), [1, 1, 2, 1])