author      Louis Vézina <[email protected]>    2019-09-13 15:12:26 -0400
committer   Louis Vézina <[email protected]>    2019-09-13 15:12:26 -0400
commit      645952c61aba3cccb5ca919be966a7ba02d853fa (patch)
tree        005362d11c260b4293b427ed8ba9d94e8b478ec3 /libs/dogpile
parent      4e7e3a39d26cb3771876b43c82a747f6c6526dda (diff)
download    bazarr-645952c61aba3cccb5ca919be966a7ba02d853fa.tar.gz
            bazarr-645952c61aba3cccb5ca919be966a7ba02d853fa.zip
WIP
Diffstat (limited to 'libs/dogpile')
-rw-r--r--   libs/dogpile/__init__.py              2
-rw-r--r--   libs/dogpile/cache/region.py        206
-rw-r--r--   libs/dogpile/cache/util.py            7
-rw-r--r--   libs/dogpile/lock.py                101
-rw-r--r--   libs/dogpile/util/compat.py          22
-rw-r--r--   libs/dogpile/util/nameregistry.py     8
-rw-r--r--   libs/dogpile/util/readwrite_lock.py  16
7 files changed, 229 insertions, 133 deletions
diff --git a/libs/dogpile/__init__.py b/libs/dogpile/__init__.py
index 99c384364..fc8fd4524 100644
--- a/libs/dogpile/__init__.py
+++ b/libs/dogpile/__init__.py
@@ -1,4 +1,4 @@
-__version__ = '0.6.5'
+__version__ = '0.7.1'
 
 from .lock import Lock  # noqa
 from .lock import NeedRegenerationException  # noqa
diff --git a/libs/dogpile/cache/region.py b/libs/dogpile/cache/region.py
index 1896cbd81..261a8db48 100644
--- a/libs/dogpile/cache/region.py
+++ b/libs/dogpile/cache/region.py
@@ -10,8 +10,9 @@ from ..util import compat
 import time
 import datetime
 from numbers import Number
-from functools import wraps
+from functools import wraps, partial
 import threading
+from decorator import decorate
 
 _backend_loader = PluginLoader("dogpile.cache")
 register_backend = _backend_loader.register
@@ -188,7 +189,7 @@ class DefaultInvalidationStrategy(RegionInvalidationStrategy):
 
 
 class CacheRegion(object):
-    """A front end to a particular cache backend.
+    r"""A front end to a particular cache backend.
 
     :param name: Optional, a string name for the region.
      This isn't used internally
@@ -484,6 +485,26 @@ class CacheRegion(object):
         else:
             return self._LockWrapper()
 
+    # cached value
+    _actual_backend = None
+
+    @property
+    def actual_backend(self):
+        """Return the ultimate backend underneath any proxies.
+
+        The backend might be the result of one or more ``proxy.wrap``
+        applications.  If so, derive the actual underlying backend.
+
+        .. versionadded:: 0.6.6
+
+        """
+        if self._actual_backend is None:
+            _backend = self.backend
+            while hasattr(_backend, 'proxied'):
+                _backend = _backend.proxied
+            self._actual_backend = _backend
+        return self._actual_backend
+
     def invalidate(self, hard=True):
         """Invalidate this :class:`.CacheRegion`.
 
@@ -723,7 +744,8 @@ class CacheRegion(object):
         ]
 
     def get_or_create(
-            self, key, creator, expiration_time=None, should_cache_fn=None):
+            self, key, creator, expiration_time=None, should_cache_fn=None,
+            creator_args=None):
         """Return a cached value based on the given key.
 
         If the value does not exist or is considered to be expired
@@ -759,6 +781,11 @@ class CacheRegion(object):
 
         :param creator: function which creates a new value.
 
+        :param creator_args: optional tuple of (args, kwargs) that will be
+         passed to the creator function if present.
+
+         .. versionadded:: 0.7.0
+
         :param expiration_time: optional expiration time which will overide
          the expiration time already configured on this :class:`.CacheRegion`
          if not None.  To set no expiration, use the value -1.
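
The new ``creator_args`` parameter lets ``get_or_create()`` forward positional and keyword arguments to the creator instead of requiring a closure, which is what the reworked decorator path later in this file relies on. A minimal hedged sketch of a direct caller; the backend choice, key, and creator here are illustrative only:

    # Sketch only: assumes dogpile.cache 0.7 with the creator_args parameter
    # added in this commit.  Backend, key, and creator are made up.
    from dogpile.cache import make_region

    region = make_region().configure("dogpile.cache.memory")

    def load_user(user_id, include_profile=False):
        # stand-in for an expensive lookup
        return {"id": user_id, "profile": include_profile}

    value = region.get_or_create(
        "user:42",
        load_user,
        expiration_time=300,
        creator_args=((42,), {"include_profile": True}),
    )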
@@ -799,7 +826,7 @@ class CacheRegion(object):
             value = self.backend.get(key)
             if (value is NO_VALUE or value.metadata['v'] != value_version or
                     self.region_invalidator.is_hard_invalidated(
-                    value.metadata["ct"])):
+                        value.metadata["ct"])):
                 raise NeedRegenerationException()
             ct = value.metadata["ct"]
             if self.region_invalidator.is_soft_invalidated(ct):
@@ -808,7 +835,10 @@ class CacheRegion(object):
             return value.payload, ct
 
         def gen_value():
-            created_value = creator()
+            if creator_args:
+                created_value = creator(*creator_args[0], **creator_args[1])
+            else:
+                created_value = creator()
             value = self._value(created_value)
 
             if not should_cache_fn or \
@@ -831,8 +861,13 @@ class CacheRegion(object):
 
         if self.async_creation_runner:
             def async_creator(mutex):
-                return self.async_creation_runner(
-                    self, orig_key, creator, mutex)
+                if creator_args:
+                    @wraps(creator)
+                    def go():
+                        return creator(*creator_args[0], **creator_args[1])
+                else:
+                    go = creator
+                return self.async_creation_runner(self, orig_key, go, mutex)
         else:
             async_creator = None
 
@@ -896,7 +931,7 @@ class CacheRegion(object):
             if (value is NO_VALUE or value.metadata['v'] != value_version or
                     self.region_invalidator.is_hard_invalidated(
-                        value.metadata['v'])):
+                        value.metadata['ct'])):
                 # dogpile.core understands a 0 here as
                 # "the value is not available", e.g.
                 # _has_value() will return False.
@@ -1228,26 +1263,31 @@ class CacheRegion(object):
         if function_key_generator is None:
             function_key_generator = self.function_key_generator
 
-        def decorator(fn):
+        def get_or_create_for_user_func(key_generator, user_func, *arg, **kw):
+            key = key_generator(*arg, **kw)
+
+            timeout = expiration_time() if expiration_time_is_callable \
+                else expiration_time
+            return self.get_or_create(key, user_func, timeout,
+                                      should_cache_fn, (arg, kw))
+
+        def cache_decorator(user_func):
             if to_str is compat.string_type:
                 # backwards compatible
-                key_generator = function_key_generator(namespace, fn)
+                key_generator = function_key_generator(namespace, user_func)
             else:
                 key_generator = function_key_generator(
-                    namespace, fn,
+                    namespace, user_func,
                     to_str=to_str)
 
-            @wraps(fn)
-            def decorate(*arg, **kw):
+            def refresh(*arg, **kw):
+                """
+                Like invalidate, but regenerates the value instead
+                """
                 key = key_generator(*arg, **kw)
-
-                @wraps(fn)
-                def creator():
-                    return fn(*arg, **kw)
-                timeout = expiration_time() if expiration_time_is_callable \
-                    else expiration_time
-                return self.get_or_create(key, creator, timeout,
-                                          should_cache_fn)
+                value = user_func(*arg, **kw)
+                self.set(key, value)
+                return value
 
             def invalidate(*arg, **kw):
                 key = key_generator(*arg, **kw)
@@ -1261,20 +1301,18 @@ class CacheRegion(object):
                 key = key_generator(*arg, **kw)
                 return self.get(key)
 
-            def refresh(*arg, **kw):
-                key = key_generator(*arg, **kw)
-                value = fn(*arg, **kw)
-                self.set(key, value)
-                return value
+            user_func.set = set_
+            user_func.invalidate = invalidate
+            user_func.get = get
+            user_func.refresh = refresh
+            user_func.original = user_func
 
-            decorate.set = set_
-            decorate.invalidate = invalidate
-            decorate.refresh = refresh
-            decorate.get = get
-            decorate.original = fn
+            # Use `decorate` to preserve the signature of :param:`user_func`.
 
-            return decorate
-        return decorator
+            return decorate(user_func, partial(
+                get_or_create_for_user_func, key_generator))
+
+        return cache_decorator
 
     def cache_multi_on_arguments(
             self, namespace=None, expiration_time=None,
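
After this refactor, ``cache_on_arguments()`` builds the wrapper through ``decorate()`` and attaches the ``set``/``invalidate``/``get``/``refresh``/``original`` helpers to the decorated function, while the actual cache lookup goes through ``get_or_create_for_user_func`` using the new ``creator_args`` path. A hedged usage sketch; the region configuration and function names are illustrative only:

    # Sketch only: backend and expiration values are placeholders.
    from dogpile.cache import make_region

    region = make_region().configure("dogpile.cache.memory", expiration_time=60)

    @region.cache_on_arguments()
    def generate_something(seed):
        return seed * 2

    generate_something(5)              # computed, then cached
    generate_something(5)              # served from cache
    generate_something.invalidate(5)   # drop the cached entry
    generate_something.refresh(5)      # recompute and re-store
    generate_something.original(5)     # call the plain function, bypassing the cache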
@@ -1402,50 +1440,49 @@ class CacheRegion(object):
         if function_multi_key_generator is None:
             function_multi_key_generator = self.function_multi_key_generator
 
-        def decorator(fn):
-            key_generator = function_multi_key_generator(
-                namespace, fn,
-                to_str=to_str)
-
-            @wraps(fn)
-            def decorate(*arg, **kw):
-                cache_keys = arg
-                keys = key_generator(*arg, **kw)
-                key_lookup = dict(zip(keys, cache_keys))
-
-                @wraps(fn)
-                def creator(*keys_to_create):
-                    return fn(*[key_lookup[k] for k in keys_to_create])
-
-                timeout = expiration_time() if expiration_time_is_callable \
-                    else expiration_time
+        def get_or_create_for_user_func(key_generator, user_func, *arg, **kw):
+            cache_keys = arg
+            keys = key_generator(*arg, **kw)
+            key_lookup = dict(zip(keys, cache_keys))
+
+            @wraps(user_func)
+            def creator(*keys_to_create):
+                return user_func(*[key_lookup[k] for k in keys_to_create])
+
+            timeout = expiration_time() if expiration_time_is_callable \
+                else expiration_time
+
+            if asdict:
+                def dict_create(*keys):
+                    d_values = creator(*keys)
+                    return [
+                        d_values.get(key_lookup[k], NO_VALUE)
+                        for k in keys]
+
+                def wrap_cache_fn(value):
+                    if value is NO_VALUE:
+                        return False
+                    elif not should_cache_fn:
+                        return True
+                    else:
+                        return should_cache_fn(value)
+
+                result = self.get_or_create_multi(
+                    keys, dict_create, timeout, wrap_cache_fn)
+                result = dict(
+                    (k, v) for k, v in zip(cache_keys, result)
+                    if v is not NO_VALUE)
+            else:
+                result = self.get_or_create_multi(
+                    keys, creator, timeout,
+                    should_cache_fn)
-                if asdict:
-                    def dict_create(*keys):
-                        d_values = creator(*keys)
-                        return [
-                            d_values.get(key_lookup[k], NO_VALUE)
-                            for k in keys]
-
-                    def wrap_cache_fn(value):
-                        if value is NO_VALUE:
-                            return False
-                        elif not should_cache_fn:
-                            return True
-                        else:
-                            return should_cache_fn(value)
-
-                    result = self.get_or_create_multi(
-                        keys, dict_create, timeout, wrap_cache_fn)
-                    result = dict(
-                        (k, v) for k, v in zip(cache_keys, result)
-                        if v is not NO_VALUE)
-                else:
-                    result = self.get_or_create_multi(
-                        keys, creator, timeout,
-                        should_cache_fn)
+            return result
-                return result
+        def cache_decorator(user_func):
+            key_generator = function_multi_key_generator(
+                namespace, user_func,
+                to_str=to_str)
 
             def invalidate(*arg):
                 keys = key_generator(*arg)
@@ -1466,7 +1503,7 @@ class CacheRegion(object):
 
             def refresh(*arg):
                 keys = key_generator(*arg)
-                values = fn(*arg)
+                values = user_func(*arg)
                 if asdict:
                     self.set_multi(
                         dict(zip(keys, [values[a] for a in arg]))
@@ -1478,13 +1515,18 @@ class CacheRegion(object):
                     )
                 return values
 
-            decorate.set = set_
-            decorate.invalidate = invalidate
-            decorate.refresh = refresh
-            decorate.get = get
+            user_func.set = set_
+            user_func.invalidate = invalidate
+            user_func.refresh = refresh
+            user_func.get = get
+
+            # Use `decorate` to preserve the signature of :param:`user_func`.
+
+            return decorate(user_func, partial(get_or_create_for_user_func, key_generator))
+
+        return cache_decorator
+
 
-            return decorate
-        return decorator
 
 
 def make_region(*arg, **kw):
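
Both ``cache_on_arguments()`` and ``cache_multi_on_arguments()`` now build their wrappers with ``decorate()`` from the third-party ``decorator`` package (newly imported at the top of region.py) rather than ``functools.wraps``, so the decorated function keeps its real call signature under introspection. A standalone hedged sketch of that pattern in isolation; the helper names here are made up:

    # Sketch only: illustrates the decorate(func, caller) pattern this commit
    # switches to, not dogpile's internal implementation.
    import inspect
    from functools import partial
    from decorator import decorate

    def _call_through_cache(cache_lookup, user_func, *args, **kwargs):
        # stand-in for get_or_create_for_user_func: consult a cache, fall back
        # to the wrapped function
        return cache_lookup(user_func, *args, **kwargs)

    def cached(user_func):
        lookup = lambda fn, *a, **kw: fn(*a, **kw)   # trivial "cache" for the demo
        return decorate(user_func, partial(_call_through_cache, lookup))

    @cached
    def lookup_user(user_id, include_profile=False):
        return {"id": user_id}

    # Unlike a plain functools.wraps wrapper taking (*args, **kwargs), the
    # decorated function still reports its real signature:
    print(inspect.signature(lookup_user))   # (user_id, include_profile=False)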
diff --git a/libs/dogpile/cache/util.py b/libs/dogpile/cache/util.py
index 4b56f6429..16bcd1c97 100644
--- a/libs/dogpile/cache/util.py
+++ b/libs/dogpile/cache/util.py
@@ -1,5 +1,4 @@
 from hashlib import sha1
-import inspect
 
 from ..util import compat
 from ..util import langhelpers
@@ -28,7 +27,7 @@ def function_key_generator(namespace, fn, to_str=compat.string_type):
     else:
         namespace = '%s:%s|%s' % (fn.__module__, fn.__name__, namespace)
 
-    args = inspect.getargspec(fn)
+    args = compat.inspect_getargspec(fn)
     has_self = args[0] and args[0][0] in ('self', 'cls')
 
     def generate_key(*args, **kw):
@@ -50,7 +49,7 @@ def function_multi_key_generator(namespace, fn, to_str=compat.string_type):
     else:
         namespace = '%s:%s|%s' % (fn.__module__, fn.__name__, namespace)
 
-    args = inspect.getargspec(fn)
+    args = compat.inspect_getargspec(fn)
     has_self = args[0] and args[0][0] in ('self', 'cls')
 
     def generate_keys(*args, **kw):
@@ -88,7 +87,7 @@ def kwarg_function_key_generator(namespace, fn, to_str=compat.string_type):
     else:
         namespace = '%s:%s|%s' % (fn.__module__, fn.__name__, namespace)
 
-    argspec = inspect.getargspec(fn)
+    argspec = compat.inspect_getargspec(fn)
     default_list = list(argspec.defaults or [])
     # Reverse the list, as we want to compare the argspec by negative index,
     # meaning default_list[0] should be args[-1], which works well with
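
All three key generators now call ``compat.inspect_getargspec()`` instead of ``inspect.getargspec()``, which is deprecated on Python 3 and removed in later releases. A self-contained sketch of what that shim does, mirroring the compat code added further down in this commit:

    # Standalone illustration of the shim; the real one lives in
    # dogpile/util/compat.py.
    import collections
    from inspect import getfullargspec

    ArgSpec = collections.namedtuple(
        "ArgSpec", ["args", "varargs", "keywords", "defaults"])

    def inspect_getargspec(func):
        # keep only the first four fields so callers written against the old
        # inspect.getargspec() tuple keep working
        return ArgSpec(*getfullargspec(func)[0:4])

    def example(a, b=1, *args, **kw):
        pass

    print(inspect_getargspec(example))
    # ArgSpec(args=['a', 'b'], varargs='args', keywords='kw', defaults=(1,))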
diff --git a/libs/dogpile/lock.py b/libs/dogpile/lock.py
index 29f342d84..2ac22dcfe 100644
--- a/libs/dogpile/lock.py
+++ b/libs/dogpile/lock.py
@@ -69,11 +69,10 @@ class Lock(object):
         """Return true if the expiration time is reached, or no value
         is available."""
 
-        return not self._has_value(createdtime) or \
-            (
-                self.expiretime is not None and
-                time.time() - createdtime > self.expiretime
-            )
+        return not self._has_value(createdtime) or (
+            self.expiretime is not None and
+            time.time() - createdtime > self.expiretime
+        )
 
     def _has_value(self, createdtime):
         """Return true if the creation function has proceeded
@@ -91,68 +90,100 @@ class Lock(object):
             value = NOT_REGENERATED
             createdtime = -1
 
-        generated = self._enter_create(createdtime)
+        generated = self._enter_create(value, createdtime)
 
         if generated is not NOT_REGENERATED:
             generated, createdtime = generated
             return generated
         elif value is NOT_REGENERATED:
+            # we called upon the creator, and it said that it
+            # didn't regenerate.  this typically means another
+            # thread is running the creation function, and that the
+            # cache should still have a value.  However,
+            # we don't have a value at all, which is unusual since we just
+            # checked for it, so check again (TODO: is this a real codepath?)
             try:
                 value, createdtime = value_fn()
                 return value
             except NeedRegenerationException:
-                raise Exception("Generation function should "
-                                "have just been called by a concurrent "
-                                "thread.")
+                raise Exception(
+                    "Generation function should "
+                    "have just been called by a concurrent "
+                    "thread.")
         else:
             return value
 
-    def _enter_create(self, createdtime):
-
+    def _enter_create(self, value, createdtime):
         if not self._is_expired(createdtime):
             return NOT_REGENERATED
 
-        async = False
+        _async = False
 
         if self._has_value(createdtime):
+            has_value = True
             if not self.mutex.acquire(False):
-                log.debug("creation function in progress "
-                          "elsewhere, returning")
+                log.debug(
+                    "creation function in progress "
+                    "elsewhere, returning")
                 return NOT_REGENERATED
         else:
+            has_value = False
             log.debug("no value, waiting for create lock")
             self.mutex.acquire()
 
         try:
             log.debug("value creation lock %r acquired" % self.mutex)
 
-            # see if someone created the value already
-            try:
-                value, createdtime = self.value_and_created_fn()
-            except NeedRegenerationException:
-                pass
-            else:
-                if not self._is_expired(createdtime):
-                    log.debug("value already present")
-                    return value, createdtime
-                elif self.async_creator:
-                    log.debug("Passing creation lock to async runner")
-                    self.async_creator(self.mutex)
-                    async = True
-                    return value, createdtime
-
-            log.debug("Calling creation function")
-            created = self.creator()
-            return created
+            if not has_value:
+                # we entered without a value, or at least with "creationtime ==
+                # 0".  Run the "getter" function again, to see if another
+                # thread has already generated the value while we waited on the
+                # mutex, or if the caller is otherwise telling us there is a
+                # value already which allows us to use async regeneration. (the
+                # latter is used by the multi-key routine).
+                try:
+                    value, createdtime = self.value_and_created_fn()
+                except NeedRegenerationException:
+                    # nope, nobody created the value, we're it.
+                    # we must create it right now
+                    pass
+                else:
+                    has_value = True
+                    # caller is telling us there is a value and that we can
+                    # use async creation if it is expired.
+                    if not self._is_expired(createdtime):
+                        # it's not expired, return it
+                        log.debug("Concurrent thread created the value")
+                        return value, createdtime
+
+                    # otherwise it's expired, call creator again
+
+            if has_value and self.async_creator:
+                # we have a value we can return, safe to use async_creator
+                log.debug("Passing creation lock to async runner")
+
+                # so...run it!
+                self.async_creator(self.mutex)
+                _async = True
+
+                # and return the expired value for now
+                return value, createdtime
+
+            # it's expired, and it's our turn to create it synchronously, *or*,
+            # there's no value at all, and we have to create it synchronously
+            log.debug(
+                "Calling creation function for %s value",
+                "not-yet-present" if not has_value else
+                "previously expired"
+            )
+            return self.creator()
         finally:
-            if not async:
+            if not _async:
                 self.mutex.release()
                 log.debug("Released creation lock")
 
-
     def __enter__(self):
         return self._enter()
 
     def __exit__(self, type, value, traceback):
         pass
-
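
Most of the ``_enter_create()`` rework deals with when the lock may hand regeneration off to an asynchronous runner while the expired value keeps being served. For context, a hedged sketch of the ``async_creation_runner`` hook that this code path drives, adapted from the documented dogpile.cache pattern; the backend and timings are illustrative only:

    # Sketch only: a thread-based async runner; any backend works, the
    # memory backend is chosen just for illustration.
    import threading
    from dogpile.cache import make_region

    def async_creation_runner(cache, somekey, creator, mutex):
        """Regenerate an expired value in a background thread while the
        stale value keeps being served."""
        def runner():
            try:
                value = creator()
                cache.set(somekey, value)
            finally:
                mutex.release()   # the Lock handed us the mutex; we must release it

        threading.Thread(target=runner).start()

    region = make_region(async_creation_runner=async_creation_runner).configure(
        "dogpile.cache.memory",
        expiration_time=30,
    )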
diff --git a/libs/dogpile/util/compat.py b/libs/dogpile/util/compat.py
index d29bb1dac..198c76276 100644
--- a/libs/dogpile/util/compat.py
+++ b/libs/dogpile/util/compat.py
@@ -51,11 +51,33 @@ else:
         import thread  # noqa
 
 
+if py3k:
+    import collections
+    ArgSpec = collections.namedtuple(
+        "ArgSpec",
+        ["args", "varargs", "keywords", "defaults"])
+
+    from inspect import getfullargspec as inspect_getfullargspec
+
+    def inspect_getargspec(func):
+        return ArgSpec(
+            *inspect_getfullargspec(func)[0:4]
+        )
+else:
+    from inspect import getargspec as inspect_getargspec  # noqa
+
 if py3k or jython:
     import pickle
 else:
     import cPickle as pickle  # noqa
 
+if py3k:
+    def read_config_file(config, fileobj):
+        return config.read_file(fileobj)
+else:
+    def read_config_file(config, fileobj):
+        return config.readfp(fileobj)
+
 
 def timedelta_total_seconds(td):
     if py27:
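
The new ``read_config_file()`` helper hides the ``readfp()`` (Python 2, deprecated) versus ``read_file()`` (Python 3) split in ``ConfigParser``. A small illustrative use of this internal helper; the .ini content and section names are made up:

    # Sketch only: calls the internal compatibility helper directly for
    # demonstration purposes.
    import io
    import configparser

    from dogpile.util.compat import read_config_file

    config = configparser.ConfigParser()
    read_config_file(
        config,
        io.StringIO(u"[cache]\nbackend = dogpile.cache.memory\n"))
    print(config.get("cache", "backend"))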
diff --git a/libs/dogpile/util/nameregistry.py b/libs/dogpile/util/nameregistry.py
index a5102b238..7087f7cd6 100644
--- a/libs/dogpile/util/nameregistry.py
+++ b/libs/dogpile/util/nameregistry.py
@@ -50,7 +50,7 @@ class NameRegistry(object):
         self.creator = creator
 
     def get(self, identifier, *args, **kw):
-        """Get and possibly create the value.
+        r"""Get and possibly create the value.
 
         :param identifier: Hash key for the value.
          If the creation function is called, this identifier
@@ -75,10 +75,12 @@ class NameRegistry(object):
                 if identifier in self._values:
                     return self._values[identifier]
                 else:
-                    self._values[identifier] = value = self.creator(identifier, *args, **kw)
+                    self._values[identifier] = value = self.creator(
+                        identifier, *args, **kw)
                     return value
             except KeyError:
-                self._values[identifier] = value = self.creator(identifier, *args, **kw)
+                self._values[identifier] = value = self.creator(
+                    identifier, *args, **kw)
                 return value
         finally:
             self._mutex.release()
diff --git a/libs/dogpile/util/readwrite_lock.py b/libs/dogpile/util/readwrite_lock.py
index 2196ed7d7..9b953edb8 100644
--- a/libs/dogpile/util/readwrite_lock.py
+++ b/libs/dogpile/util/readwrite_lock.py
@@ -23,7 +23,7 @@ class ReadWriteMutex(object):
 
     def __init__(self):
         # counts how many asynchronous methods are executing
-        self.async = 0
+        self.async_ = 0
 
         # pointer to thread that is the current sync operation
         self.current_sync_operation = None
@@ -31,7 +31,7 @@ class ReadWriteMutex(object):
         # condition object to lock on
         self.condition = threading.Condition(threading.Lock())
 
-    def acquire_read_lock(self, wait = True):
+    def acquire_read_lock(self, wait=True):
         """Acquire the 'read' lock."""
         self.condition.acquire()
         try:
@@ -45,7 +45,7 @@ class ReadWriteMutex(object):
             if self.current_sync_operation is not None:
                 return False
 
-            self.async += 1
+            self.async_ += 1
             log.debug("%s acquired read lock", self)
         finally:
             self.condition.release()
@@ -57,23 +57,23 @@ class ReadWriteMutex(object):
         """Release the 'read' lock."""
         self.condition.acquire()
         try:
-            self.async -= 1
+            self.async_ -= 1
 
             # check if we are the last asynchronous reader thread
             # out the door.
-            if self.async == 0:
+            if self.async_ == 0:
                 # yes. so if a sync operation is waiting, notifyAll to wake
                 # it up
                 if self.current_sync_operation is not None:
                     self.condition.notifyAll()
-            elif self.async < 0:
+            elif self.async_ < 0:
                 raise LockError("Synchronizer error - too many "
                                 "release_read_locks called")
             log.debug("%s released read lock", self)
         finally:
             self.condition.release()
 
-    def acquire_write_lock(self, wait = True):
+    def acquire_write_lock(self, wait=True):
         """Acquire the 'write' lock."""
        self.condition.acquire()
         try:
@@ -96,7 +96,7 @@ class ReadWriteMutex(object):
             self.current_sync_operation = threading.currentThread()
 
             # now wait again for asyncs to finish
-            if self.async > 0:
+            if self.async_ > 0:
                 if wait:
                     # wait
                     self.condition.wait()
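
The ``self.async`` to ``self.async_`` rename (and ``_async`` in lock.py above) is needed because ``async`` became a reserved keyword in Python 3.7; the public locking methods are unchanged. A brief hedged sketch of using the ReadWriteMutex directly, for orientation only:

    # Sketch only: exercises the public methods of the internal
    # ReadWriteMutex; the rename above is invisible to callers.
    from dogpile.util.readwrite_lock import ReadWriteMutex

    mutex = ReadWriteMutex()

    mutex.acquire_read_lock()       # many readers may hold this concurrently
    try:
        pass                        # read shared state
    finally:
        mutex.release_read_lock()

    mutex.acquire_write_lock()      # a writer waits for readers to drain
    try:
        pass                        # mutate shared state
    finally:
        mutex.release_write_lock()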