fix(utils.py): fix cache hits for streaming

Fixes https://github.com/BerriAI/litellm/issues/4109
Krrish Dholakia 2024-07-26 19:03:42 -07:00
parent cd0ec17a29
commit 1562cba823
5 changed files with 42 additions and 16 deletions

@@ -1220,7 +1220,9 @@ class Logging:
"""
Implementing async callbacks, to handle asyncio event loop issues when custom integrations need to use async functions.
"""
print_verbose("Logging Details LiteLLM-Async Success Call")
print_verbose(
"Logging Details LiteLLM-Async Success Call, cache_hit={}".format(cache_hit)
)
start_time, end_time, result = self._success_handler_helper_fn(
start_time=start_time, end_time=end_time, result=result, cache_hit=cache_hit
)
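
For context, a minimal, self-contained sketch of the pattern this hunk adopts: a cache_hit flag is threaded through to the async success handler so the verbose log records whether a (streamed) response was served from the cache. The Logging, async_success_handler, and print_verbose names mirror the diff above, but the body below is illustrative only and is not litellm's actual implementation.

# Illustrative sketch only -- not litellm's real Logging class.
import asyncio


def print_verbose(msg: str) -> None:
    # Stand-in for litellm's print_verbose helper.
    print(msg)


class Logging:
    async def async_success_handler(self, result, start_time=None, end_time=None, cache_hit=False):
        # After this commit, the verbose log includes the cache_hit flag,
        # making cache behaviour for streaming calls visible in debug output.
        print_verbose(
            "Logging Details LiteLLM-Async Success Call, cache_hit={}".format(cache_hit)
        )
        # ... downstream success callbacks would run here ...
        return result


if __name__ == "__main__":
    asyncio.run(Logging().async_success_handler(result="hello", cache_hit=True))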