Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-25 10:44:24 +00:00)
(feat) async + stream cache
parent 6f8a35d539
commit 67c730e264

1 changed file with 2 additions and 0 deletions
@@ -208,6 +208,8 @@ class Cache:
             litellm.input_callback.append("cache")
         if "cache" not in litellm.success_callback:
             litellm.success_callback.append("cache")
+        if "cache" not in litellm._async_success_callback:
+            litellm._async_success_callback.append("cache")
 
     def get_cache_key(self, *args, **kwargs):
         """
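For context, here is a minimal sketch of what this change enables, assuming the litellm Cache and acompletion APIs as they existed around this commit (the model name, messages, and the equality check below are illustrative, not part of the change): constructing a Cache registers the "cache" callback, and with this commit it is also appended to litellm._async_success_callback, so async (and streamed) completions can be written to and later served from the cache.

# Sketch only: assumes litellm.Cache() registers the callbacks shown in the diff
# and that acompletion() returns an OpenAI-style ModelResponse.
import asyncio

import litellm
from litellm import acompletion
from litellm.caching import Cache


async def main():
    # Constructing a Cache appends "cache" to litellm.success_callback and,
    # with this commit, to litellm._async_success_callback as well.
    litellm.cache = Cache()

    messages = [{"role": "user", "content": "Hello, who are you?"}]

    # First async call goes to the provider; the async success callback
    # added by this commit stores the response in the cache.
    first = await acompletion(model="gpt-3.5-turbo", messages=messages)

    # An identical second call can now be answered from the cache.
    second = await acompletion(model="gpt-3.5-turbo", messages=messages)

    print(first.choices[0].message.content == second.choices[0].message.content)


asyncio.run(main())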