# Negative maxsize is treated as 0
if maxsize < 0:
maxsize = 0
+
elif callable(maxsize) and isinstance(typed, bool):
# The user_function was passed in directly via the maxsize argument
user_function, maxsize = maxsize, 128
wrapper = _lru_cache_wrapper(user_function, maxsize, typed, _CacheInfo)
wrapper.cache_parameters = lambda : {'maxsize': maxsize, 'typed': typed}
return update_wrapper(wrapper, user_function)
+
elif maxsize is not None:
raise TypeError(
'Expected first argument to be an integer, a callable, or None')
def wrapper(*args, **kwds):
# No caching -- just a statistics update
nonlocal misses
+
misses += 1
result = user_function(*args, **kwds)
return result
def wrapper(*args, **kwds):
# Simple caching without ordering or size limit
nonlocal hits, misses
+
key = make_key(args, kwds, typed)
result = cache_get(key, sentinel)
if result is not sentinel:
def wrapper(*args, **kwds):
# Size limited caching that tracks accesses by recency
nonlocal root, hits, misses, full
+
key = make_key(args, kwds, typed)
+
with lock:
link = cache_get(key)
if link is not None:
hits += 1
return result
misses += 1
+
result = user_function(*args, **kwds)
+
with lock:
if key in cache:
# Getting here means that this same key was added to the
# update is already done, we need only return the
# computed result and update the count of misses.
pass
+
elif full:
# Use the old root to store the new key and result.
oldroot = root
oldroot[KEY] = key
oldroot[RESULT] = result
+
# Empty the oldest link and make it the new root.
# Keep a reference to the old key and old result to
# prevent their ref counts from going to zero during the
oldkey = root[KEY]
oldresult = root[RESULT]
root[KEY] = root[RESULT] = None
+
# Now update the cache dictionary.
del cache[oldkey]
+
# Save the potentially reentrant cache[key] assignment
# for last, after the root and links have been put in
# a consistent state.
cache[key] = oldroot
+
else:
# Put result in a new link at the front of the queue.
last = root[PREV]
link = [last, root, key, result]
last[NEXT] = root[PREV] = cache[key] = link
+
# Use the cache_len bound method instead of the len() function
# which could potentially be wrapped in an lru_cache itself.
full = (cache_len() >= maxsize)
+
return result
def cache_info():
    """Report cache statistics"""
    # Read all counters under the lock so the returned tuple is a
    # consistent snapshot even with concurrent cache updates.
    with lock:
        return _CacheInfo(hits, misses, maxsize, cache_len())
def cache_clear():
"""Clear the cache and cache statistics"""
nonlocal hits, misses, full
+
with lock:
cache.clear()
root[:] = [root, root, None, None]