(feat) async + stream cache

ishaan-jaff 2023-12-09 13:59:01 -08:00
parent 6f8a35d539
commit 67c730e264


@@ -208,6 +208,8 @@ class Cache:
             litellm.input_callback.append("cache")
         if "cache" not in litellm.success_callback:
             litellm.success_callback.append("cache")
+        if "cache" not in litellm._async_success_callback:
+            litellm._async_success_callback.append("cache")
 
     def get_cache_key(self, *args, **kwargs):
         """