fastapi-redis-cache
handling dirty cache entries
Hi,
Do you have any recommendation for handling cache entries that may have become dirty? E.g. if you have a PUT endpoint modifying a resource that may be in my cache, I guess the caching mechanism in fastapi-redis-cache's code will not magically become aware that the cache entry is dirty. Do I have to handle (update or delete) the cache entry explicitly within the PUT code? This doesn't look very elegant to me, compared to the clean way the rest of the caching is hidden from the developer.
cheers j.
Hey @joelthill, I had the same use case. Yes, you have to explicitly update/delete the cache entry for PUT/DELETE requests. This is how I modified the code in https://github.com/a-luna/fastapi-redis-cache/blob/main/src/fastapi_redis_cache/cache.py#L46. I have used Python 3.10 here, so the dispatch uses pattern matching with match/case; if you are on an earlier version, use an if/elif chain instead (see the sketch after the cache.py code below).
client.py
https://github.com/a-luna/fastapi-redis-cache/blob/main/src/fastapi_redis_cache/client.py#L14
ALLOWED_HTTP_TYPES = ["GET", "POST", "PUT", "DELETE"]
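For context, this list is what the decorator consults (via request_is_not_cacheable(), called in cache.py below) to decide whether to bypass caching, which is why PUT and DELETE have to be added here. A rough sketch of that gate, as an assumption of how client.py works rather than its exact code:
# Sketch only -- method of FastApiRedisCache in client.py; the real
# implementation may differ, this is just an approximation of the gate.
def request_is_not_cacheable(self, request) -> bool:
    # Any method outside ALLOWED_HTTP_TYPES skips caching entirely,
    # so PUT and DELETE must be in the list for the branches below to run.
    return not request or request.method not in ALLOWED_HTTP_TYPES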
router.py
@router.put("/")
@router.delete("/")
@router.get("/")
@cache()
def your_endpoint(request: Request, response: Response):
    pass
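Two things to keep in mind here: the decorator pops request and response from the keyword arguments (see cache.py below), so the endpoint signature needs them for the method dispatch and cache headers to work; and @cache() has to be the decorator closest to the function, so the routes register the cached wrapper rather than the bare endpoint.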
cache.py
# Note: in addition to the imports already in cache.py, `status` must be
# imported from fastapi for the 405 fallback below (from fastapi import status).
def cache(*, expire: Union[int, timedelta] = ONE_YEAR_IN_SECONDS):  # type: ignore
    """Enable caching behavior for the decorated function.

    Args:
        expire (Union[int, timedelta], optional): The number of seconds
            from now when the cached response should expire. Defaults to 31,536,000
            seconds (i.e., the number of seconds in one year).
    """
    def outer_wrapper(func):  # type: ignore
        @wraps(func)
        async def inner_wrapper(*args, **kwargs):  # type: ignore # pylint: disable=too-many-return-statements
            """Return cached value if one exists, otherwise evaluate the wrapped function and cache the result."""
            func_kwargs = kwargs.copy()
            request = func_kwargs.pop("request", None)
            response = func_kwargs.pop("response", None)
            create_response_directly = not response
            if create_response_directly:
                response = Response()
            redis_cache = FastApiRedisCache()
            if redis_cache.not_connected or redis_cache.request_is_not_cacheable(request):
                # if the redis client is not connected or request is not cacheable, no caching behavior is performed.
                return await get_api_response_async(func, *args, **kwargs)
            key = redis_cache.get_cache_key(func, *args, **kwargs)
            match request.method:
                case "GET" | "POST":
                    # read path: serve the cached copy if one exists, otherwise
                    # evaluate the endpoint and cache the fresh response
                    ttl, in_cache = redis_cache.check_cache(key)
                    if in_cache:
                        redis_cache.set_response_headers(response, True, deserialize_json(in_cache), ttl)
                        if redis_cache.requested_resource_not_modified(request, in_cache):
                            response.status_code = int(HTTPStatus.NOT_MODIFIED)
                            return (
                                Response(
                                    content=None,
                                    status_code=response.status_code,
                                    media_type="application/json",
                                    headers=response.headers,
                                )
                                if create_response_directly
                                else response
                            )
                        return (
                            Response(content=in_cache, media_type="application/json", headers=response.headers)
                            if create_response_directly
                            else deserialize_json(in_cache)
                        )
                    response_data = await get_api_response_async(func, *args, **kwargs)
                    ttl = calculate_ttl(expire)
                    cached = redis_cache.add_to_cache(key, response_data, ttl)
                    if cached:
                        redis_cache.set_response_headers(
                            response,
                            cache_hit=False,
                            response_data=response_data,
                            ttl=ttl,
                        )
                        return (
                            Response(
                                content=serialize_json(response_data),
                                media_type="application/json",
                                headers=response.headers,
                            )
                            if create_response_directly
                            else response_data
                        )
                    return response_data
case "PUT":
redis_cache.delete_from_cache(key)
response_data = await get_api_response_async(func, *args, **kwargs)
ttl = calculate_ttl(expire)
_ = redis_cache.add_to_cache(key, response_data, ttl)
return
case "DELETE":
_ = redis_cache.delete_from_cache(key)
return
case _:
return Response(
content="Invalid method",
status_code=status.HTTP_405_METHOD_NOT_ALLOWED,
media_type="application/json",
)
        return inner_wrapper

    return outer_wrapper
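As mentioned above, the match statement requires Python 3.10+. On earlier versions the same dispatch can be written as a plain if/elif chain; a minimal sketch of the structure, with the branch bodies identical to the case blocks above:
# Equivalent dispatch for Python < 3.10 -- branch bodies same as the case blocks above.
if request.method in ("GET", "POST"):
    ...  # read path: check_cache / add_to_cache as in case "GET" | "POST"
elif request.method == "PUT":
    ...  # evict, evaluate the endpoint, re-cache
elif request.method == "DELETE":
    ...  # evict the cached copy and evaluate the endpoint
else:
    return Response(
        content="Invalid method",
        status_code=status.HTTP_405_METHOD_NOT_ALLOWED,
        media_type="application/json",
    )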