Create _cache.py
BobTheBuidler authored Dec 17, 2024
1 parent 76c11ae · commit 353dc6e
Showing 1 changed file with 28 additions and 0 deletions.
28 changes: 28 additions & 0 deletions evmspec/data/_cache.py
@@ -0,0 +1,28 @@
from time import monotonic
from cachetools import cached, keys
from cachetools.func import TTLCache, _UnboundTTLCache


def ttl_cache(maxsize=128, ttl=600, timer=monotonic, typed=False):
"""Decorator to wrap a function with a memoizing callable that saves
up to `maxsize` results based on a Least Recently Used (LRU)
algorithm with a per-item time-to-live (TTL) value.
"""
if maxsize is None:
return _cache(_UnboundTTLCache(ttl, timer), None, typed)
elif callable(maxsize):
return _cache(TTLCache(128, ttl, timer), 128, typed)(maxsize)
else:
return _cache(TTLCache(maxsize, ttl, timer), maxsize, typed)


def _cache(cache, maxsize, typed):
    # Reimplements cachetools.func's ttl_cache internals without the RLock
    # normally used to guard against race conditions between threads.

def decorator(func):
key = keys.typedkey if typed else keys.hashkey
wrapper = cached(cache=cache, key=key, lock=None, info=True)(func)
wrapper.cache_parameters = lambda: {"maxsize": maxsize, "typed": typed}
return wrapper

return decorator
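
Not part of the commit, but a minimal usage sketch, assuming this module ships as evmspec.data._cache and cachetools is installed; expensive_lookup is a hypothetical function invented for illustration:

from time import time

from evmspec.data._cache import ttl_cache


@ttl_cache(maxsize=256, ttl=60)
def expensive_lookup(key):
    # Hypothetical stand-in for a slow computation; results are memoized
    # for up to 60 seconds, with at most 256 entries retained (LRU eviction).
    return hash((key, time()))


first = expensive_lookup("block-19000000")
second = expensive_lookup("block-19000000")
assert first == second  # second call is served from the cache within the TTL

print(expensive_lookup.cache_info())        # hit/miss stats, via cached(info=True)
print(expensive_lookup.cache_parameters())  # {'maxsize': 256, 'typed': False}

Because _cache passes lock=None to cached, the wrapper skips the RLock that cachetools.func.ttl_cache acquires on every call; that trades thread-safety for lower per-call overhead, which is reasonable when the decorated function is only ever called from a single thread (or an asyncio event loop).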
