mirror of https://github.com/BerriAI/litellm.git
fix(utils.py): async add to cache - for streaming
parent 4912ad5cf4
commit 909341c4f2

1 changed file with 6 additions and 1 deletion
@@ -1795,7 +1795,12 @@ class Logging:
                         )
                         result = kwargs["async_complete_streaming_response"]
                         # only add to cache once we have a complete streaming response
-                        litellm.cache.add_cache(result, **kwargs)
+                        if litellm.cache is not None and not isinstance(
+                            litellm.cache.cache, S3Cache
+                        ):
+                            await litellm.cache.async_add_cache(result, **kwargs)
+                        else:
+                            litellm.cache.add_cache(result, **kwargs)
                 if isinstance(callback, CustomLogger):  # custom logger class
                     print_verbose(
                         f"Running Async success callback: {callback}; self.stream: {self.stream}; async_complete_streaming_response: {self.model_call_details.get('async_complete_streaming_response', None)} result={result}"
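
The change swaps the blocking litellm.cache.add_cache call for an awaited litellm.cache.async_add_cache once the complete streaming response has been assembled, keeping the synchronous path only for backends without an async write (here, S3Cache). A minimal sketch of that dispatch pattern, using hypothetical stand-in cache classes rather than litellm's actual implementations:

import asyncio

# Hypothetical stand-ins for litellm's cache wrappers, used only to
# illustrate the dispatch pattern in this commit; not litellm's real classes.
class InMemoryCache:
    def __init__(self):
        self._store = {}

    def add_cache(self, key, value):
        # Synchronous write; blocks the event loop if the backend is slow.
        self._store[key] = value

    async def async_add_cache(self, key, value):
        # Awaitable write; a real backend would do non-blocking I/O here.
        self.add_cache(key, value)

class S3LikeCache(InMemoryCache):
    """Stand-in for a backend (S3Cache in the diff) with no async write path."""

async def on_stream_complete(cache, key, complete_response):
    if cache is None:
        return  # the real code only reaches this point with a cache configured
    # Mirror of the commit's branch: prefer the awaitable write, but fall
    # back to the blocking call for backends that only implement add_cache.
    # (In litellm the isinstance check runs against the inner client,
    # litellm.cache.cache, rather than the wrapper itself.)
    if not isinstance(cache, S3LikeCache):
        await cache.async_add_cache(key, complete_response)
    else:
        cache.add_cache(key, complete_response)

asyncio.run(on_stream_complete(InMemoryCache(), "req-1", "full streamed text"))

The cache write is deferred until the complete streaming response exists (per the comment in the diff), since caching individual chunks would store a partial result; the isinstance check then routes supported backends through the non-blocking path.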