Skip to content

Commit

Permalink
Introduce skip_cache_func (#652)
Browse files Browse the repository at this point in the history
Co-authored-by: Vitaly Petrov <[email protected]>
Co-authored-by: Sam Bull <[email protected]>
  • Loading branch information
3 people committed Feb 9, 2023
1 parent 0f9eb13 commit 947fbc5
Show file tree
Hide file tree
Showing 2 changed files with 66 additions and 2 deletions.
30 changes: 28 additions & 2 deletions aiocache/decorators.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,10 @@ class cached:
:param key_builder: Callable that allows to build the function dynamically. It receives
the function plus same args and kwargs passed to the function.
This behavior is necessarily different than ``BaseCache.build_key()``
:param skip_cache_func: Callable that receives the result after calling the
wrapped function and should return `True` if the value should skip the
cache (or `False` to store in the cache).
e.g. to avoid caching `None` results: `lambda r: r is None`
:param cache: cache class to use when calling the ``set``/``get`` operations.
Default is :class:`aiocache.SimpleMemoryCache`.
:param serializer: serializer instance to use when calling the ``dumps``/``loads``.
Expand All @@ -63,6 +67,7 @@ def __init__(
key=None,
namespace=None,
key_builder=None,
skip_cache_func=lambda x: False,
cache=Cache.MEMORY,
serializer=None,
plugins=None,
Expand All @@ -73,6 +78,7 @@ def __init__(
self.ttl = ttl
self.key = key
self.key_builder = key_builder
self.skip_cache_func = skip_cache_func
self.noself = noself
self.alias = alias
self.cache = None
Expand Down Expand Up @@ -117,6 +123,9 @@ async def decorator(

result = await f(*args, **kwargs)

if self.skip_cache_func(result):
return result

if cache_write:
if aiocache_wait_for_write:
await self.set_in_cache(key, result)
Expand Down Expand Up @@ -182,6 +191,10 @@ class cached_stampede(cached):
:param key_builder: Callable that allows to build the function dynamically. It receives
the function plus same args and kwargs passed to the function.
This behavior is necessarily different than ``BaseCache.build_key()``
:param skip_cache_func: Callable that receives the result after calling the
wrapped function and should return `True` if the value should skip the
cache (or `False` to store in the cache).
e.g. to avoid caching `None` results: `lambda r: r is None`
:param cache: cache class to use when calling the ``set``/``get`` operations.
Default is :class:`aiocache.SimpleMemoryCache`.
:param serializer: serializer instance to use when calling the ``dumps``/``loads``.
Expand Down Expand Up @@ -213,6 +226,9 @@ async def decorator(self, f, *args, **kwargs):

result = await f(*args, **kwargs)

if self.skip_cache_func(result):
return result

await self.set_in_cache(key, result)

return result
Expand Down Expand Up @@ -279,6 +295,9 @@ class multi_cached:
``keys_from_attr``, the decorated callable, and the positional and keyword arguments
that were passed to the decorated callable. This behavior is necessarily different than
``BaseCache.build_key()`` and the call signature differs from ``cached.key_builder``.
:param skip_cache_func: Callable that receives both key and value and returns True
    if that key-value pair should not be cached (or False to store in cache).
    The keys and values to be passed are taken from the wrapped function result.
:param ttl: int seconds to store the keys. Default is 0 which means no expiration.
:param cache: cache class to use when calling the ``multi_set``/``multi_get`` operations.
Default is :class:`aiocache.SimpleMemoryCache`.
Expand All @@ -297,6 +316,7 @@ def __init__(
keys_from_attr,
namespace=None,
key_builder=None,
skip_cache_func=lambda k, v: False,
ttl=SENTINEL,
cache=Cache.MEMORY,
serializer=None,
Expand All @@ -306,6 +326,7 @@ def __init__(
):
self.keys_from_attr = keys_from_attr
self.key_builder = key_builder or (lambda key, f, *args, **kwargs: key)
self.skip_cache_func = skip_cache_func
self.ttl = ttl
self.alias = alias
self.cache = None
Expand Down Expand Up @@ -365,12 +386,17 @@ async def decorator(
result = await f(*new_args, **kwargs)
result.update(partial)

to_cache = {k: v for k, v in result.items() if not self.skip_cache_func(k, v)}

if not to_cache:
return result

if cache_write:
if aiocache_wait_for_write:
await self.set_in_cache(result, f, args, kwargs)
await self.set_in_cache(to_cache, f, args, kwargs)
else:
# TODO: Use aiojobs to avoid warnings.
asyncio.create_task(self.set_in_cache(result, f, args, kwargs))
asyncio.create_task(self.set_in_cache(to_cache, f, args, kwargs))

return result

Expand Down
38 changes: 38 additions & 0 deletions tests/acceptance/test_decorators.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,6 +49,31 @@ async def fn(self, a, b=2):
await fn("self", 1, 3)
assert await cache.exists(build_key(fn, "self", 1, 3)) is True

@pytest.mark.parametrize("decorator", (cached, cached_stampede))
async def test_cached_skip_cache_func(self, cache, decorator):
    """A result rejected by skip_cache_func must not be written to the cache."""

    @decorator(skip_cache_func=lambda r: r is None)
    async def sk_func(x):
        return x if x > 0 else None

    # Positive input -> non-None result -> stored in the cache.
    value = await sk_func(1)
    assert value

    stored_key = decorator().get_cache_key(sk_func, args=(1,), kwargs={})
    assert stored_key
    assert await cache.exists(stored_key)
    assert await cache.get(stored_key) == value

    # Non-positive input -> None result -> skip_cache_func returns True,
    # so nothing is written for this key.
    await sk_func(-1)

    skipped_key = decorator().get_cache_key(sk_func, args=(-1,), kwargs={})
    assert skipped_key
    assert not await cache.exists(skipped_key)

async def test_cached_without_namespace(self, cache):
"""Default cache key is created when no namespace is provided"""
@cached(namespace=None)
Expand Down Expand Up @@ -149,6 +174,19 @@ async def fn(self, keys, market="ES"):
assert await cache.exists("fn_" + _ensure_key(Keys.KEY) + "_ES") is True
assert await cache.exists("fn_" + _ensure_key(Keys.KEY_1) + "_ES") is True

async def test_multi_cached_skip_keys(self, cache):
    """Key/value pairs rejected by skip_cache_func are returned but not cached."""

    @multi_cached(keys_from_attr="keys", skip_cache_func=lambda _, v: v is None)
    async def multi_sk_fn(keys, values):
        return dict(zip(keys, values))

    result = await multi_sk_fn(keys=[Keys.KEY, Keys.KEY_1], values=[42, None])
    # Both keys come back in the function result regardless of caching.
    assert result
    assert Keys.KEY in result
    assert Keys.KEY_1 in result

    # Only the non-None entry is persisted; the None one is skipped.
    assert await cache.exists(Keys.KEY)
    assert await cache.get(Keys.KEY) == result[Keys.KEY]
    assert not await cache.exists(Keys.KEY_1)

async def test_fn_with_args(self, cache):
@multi_cached("keys")
async def fn(keys, *args):
Expand Down

0 comments on commit 947fbc5

Please sign in to comment.