1"""`functools.lru_cache` compatible memoizing function decorators."""
2
3from __future__ import absolute_import
4
5import collections
6import functools
7import random
8
9try:
10  from time import monotonic as default_timer
11except ImportError:
12  from time import time as default_timer
13
14try:
15  from threading import RLock
16except ImportError:  # pragma: no cover
17  from dummy_threading import RLock
18
19from . import keys
20from .lfu import LFUCache
21from .lru import LRUCache
22from .rr import RRCache
23from .ttl import TTLCache
24
# Public API: only the decorator factories are exported.
__all__ = ('lfu_cache', 'lru_cache', 'rr_cache', 'ttl_cache')

# Statistics tuple mirroring functools.lru_cache's CacheInfo.
_CacheInfo = collections.namedtuple('CacheInfo',
                                    ['hits', 'misses', 'maxsize', 'currsize'])
29
30
31class _UnboundCache(dict):
32
33  maxsize = None
34
35  @property
36  def currsize(self):
37    return len(self)
38
39
class _UnboundTTLCache(TTLCache):
  """TTL cache without a size bound, used when ``maxsize=None``.

  Items still expire after ``ttl`` time units, but the cache never evicts
  on size: the underlying maxsize is infinite, and ``maxsize`` is reported
  as ``None`` to match the unbounded-cache convention used by
  ``_UnboundCache``.
  """

  def __init__(self, ttl, timer):
    super(_UnboundTTLCache, self).__init__(float('inf'), ttl, timer)

  @property
  def maxsize(self):
    # Report None so cache_info() shows an unbounded cache.
    return None
48
49
def _cache(cache, typed=False):
  """Return a decorator memoizing function results in the given `cache`.

  The decorated function gains ``cache_info()`` and ``cache_clear()``
  attributes, mirroring `functools.lru_cache`.
  """

  def decorator(func):
    make_key = keys.typedkey if typed else keys.hashkey
    lock = RLock()
    # stats[0] = hits, stats[1] = misses; a list so the closures can mutate it.
    stats = [0, 0]

    def cache_info():
      """Report hit/miss counts and cache sizes as a CacheInfo tuple."""
      with lock:
        hits, misses = stats
        return _CacheInfo(hits, misses, cache.maxsize, cache.currsize)

    def cache_clear():
      """Evict all cached items and reset the hit/miss counters."""
      with lock:
        try:
          cache.clear()
        finally:
          # Reset counters even if clear() raises.
          stats[0] = stats[1] = 0

    def wrapper(*args, **kwargs):
      k = make_key(*args, **kwargs)
      with lock:
        try:
          result = cache[k]
        except KeyError:
          stats[1] += 1  # miss
        else:
          stats[0] += 1  # hit
          return result
      # Call the wrapped function outside the lock so that a slow function
      # does not serialize all callers.
      result = func(*args, **kwargs)
      try:
        with lock:
          cache[k] = result
      except ValueError:
        pass  # value too large for the cache
      return result

    functools.update_wrapper(wrapper, func)
    if not hasattr(wrapper, '__wrapped__'):
      wrapper.__wrapped__ = func  # Python 2.7: update_wrapper omits this
    wrapper.cache_info = cache_info
    wrapper.cache_clear = cache_clear
    return wrapper

  return decorator
96
97
def lfu_cache(maxsize=128, typed=False):
  """Decorator to wrap a function with a memoizing callable that saves
  up to `maxsize` results based on a Least Frequently Used (LFU)
  algorithm.
  """
  # maxsize=None selects an unbounded cache, as with functools.lru_cache.
  cache = _UnboundCache() if maxsize is None else LFUCache(maxsize)
  return _cache(cache, typed)
109
110
def lru_cache(maxsize=128, typed=False):
  """Decorator to wrap a function with a memoizing callable that saves
  up to `maxsize` results based on a Least Recently Used (LRU)
  algorithm.
  """
  # maxsize=None selects an unbounded cache, as with functools.lru_cache.
  cache = _UnboundCache() if maxsize is None else LRUCache(maxsize)
  return _cache(cache, typed)
122
123
def rr_cache(maxsize=128, choice=random.choice, typed=False):
  """Decorator to wrap a function with a memoizing callable that saves
  up to `maxsize` results based on a Random Replacement (RR)
  algorithm.

  `choice` picks the item to evict from the cache's keys.
  """
  # maxsize=None selects an unbounded cache; `choice` is then irrelevant.
  if maxsize is None:
    cache = _UnboundCache()
  else:
    cache = RRCache(maxsize, choice)
  return _cache(cache, typed)
135
136
def ttl_cache(maxsize=128, ttl=600, timer=default_timer, typed=False):
  """Decorator to wrap a function with a memoizing callable that saves
  up to `maxsize` results based on a Least Recently Used (LRU)
  algorithm with a per-item time-to-live (TTL) value.
  """
  # With maxsize=None items are never evicted on size but still expire.
  if maxsize is None:
    cache = _UnboundTTLCache(ttl, timer)
  else:
    cache = TTLCache(maxsize, ttl, timer)
  return _cache(cache, typed)
147