"""
LRU cache implementation for Python 2.7
Ported from http://code.activestate.com/recipes/578078/ and simplified for our
use (only support maxsize > 0 and positional arguments).
"""
  6. from collections import namedtuple
  7. from functools import update_wrapper
  8. from threading import RLock
# Immutable record returned by a cached function's cache_info():
# hit/miss counters plus the configured maxsize and the current cache size.
_CacheInfo = namedtuple("CacheInfo", ["hits", "misses", "maxsize", "currsize"])
  10. def lru_cache(maxsize=100):
  11. """Least-recently-used cache decorator.
  12. Arguments to the cached function must be hashable.
  13. See: http://en.wikipedia.org/wiki/Cache_algorithms#Least_Recently_Used
  14. """
  15. def decorating_function(user_function):
  16. cache = dict()
  17. stats = [0, 0] # make statistics updateable non-locally
  18. HITS, MISSES = 0, 1 # names for the stats fields
  19. cache_get = cache.get # bound method to lookup key or return None
  20. _len = len # localize the global len() function
  21. lock = RLock() # linkedlist updates aren't threadsafe
  22. root = [] # root of the circular doubly linked list
  23. root[:] = [root, root, None, None] # initialize by pointing to self
  24. nonlocal_root = [root] # make updateable non-locally
  25. PREV, NEXT, KEY, RESULT = 0, 1, 2, 3 # names for the link fields
  26. assert maxsize and maxsize > 0, "maxsize %s not supported" % maxsize
  27. def wrapper(*args):
  28. # size limited caching that tracks accesses by recency
  29. key = args
  30. with lock:
  31. link = cache_get(key)
  32. if link is not None:
  33. # record recent use of the key by moving it to the
  34. # front of the list
  35. root, = nonlocal_root
  36. link_prev, link_next, key, result = link
  37. link_prev[NEXT] = link_next
  38. link_next[PREV] = link_prev
  39. last = root[PREV]
  40. last[NEXT] = root[PREV] = link
  41. link[PREV] = last
  42. link[NEXT] = root
  43. stats[HITS] += 1
  44. return result
  45. result = user_function(*args)
  46. with lock:
  47. root, = nonlocal_root
  48. if key in cache:
  49. # getting here means that this same key was added to the
  50. # cache while the lock was released. since the link
  51. # update is already done, we need only return the
  52. # computed result and update the count of misses.
  53. pass
  54. elif _len(cache) >= maxsize:
  55. # use the old root to store the new key and result
  56. oldroot = root
  57. oldroot[KEY] = key
  58. oldroot[RESULT] = result
  59. # empty the oldest link and make it the new root
  60. root = nonlocal_root[0] = oldroot[NEXT]
  61. oldkey = root[KEY]
  62. # oldvalue = root[RESULT]
  63. root[KEY] = root[RESULT] = None
  64. # now update the cache dictionary for the new links
  65. del cache[oldkey]
  66. cache[key] = oldroot
  67. else:
  68. # put result in a new link at the front of the list
  69. last = root[PREV]
  70. link = [last, root, key, result]
  71. last[NEXT] = root[PREV] = cache[key] = link
  72. stats[MISSES] += 1
  73. return result
  74. def cache_info():
  75. """Report cache statistics"""
  76. with lock:
  77. return _CacheInfo(stats[HITS], stats[MISSES], maxsize, len(cache))
  78. def cache_clear():
  79. """Clear the cache and cache statistics"""
  80. with lock:
  81. cache.clear()
  82. root = nonlocal_root[0]
  83. root[:] = [root, root, None, None]
  84. stats[:] = [0, 0]
  85. wrapper.__wrapped__ = user_function
  86. wrapper.cache_info = cache_info
  87. wrapper.cache_clear = cache_clear
  88. return update_wrapper(wrapper, user_function)
  89. return decorating_function