Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-25 10:44:24 +00:00
commit e2831e9c80
parent 6b8e6497f6

    fix: fix proxy logging

3 changed files with 5 additions and 3 deletions
@@ -1084,7 +1084,7 @@ class Logging:
     def success_handler(
         self, result=None, start_time=None, end_time=None, cache_hit=None, **kwargs
     ):
-        verbose_logger.info(f"Logging Details LiteLLM-Success Call")
+        verbose_logger.debug(f"Logging Details LiteLLM-Success Call")
         # print(f"original response in success handler: {self.model_call_details['original_response']}")
         try:
             verbose_logger.debug(f"success callbacks: {litellm.success_callback}")
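The first hunk demotes a banner that fired on every successful call from info to debug, so it no longer floods proxy logs at the default verbosity. A minimal sketch of the effect using Python's standard logging module (the logger name and setup below are illustrative, not litellm's actual wiring):

import logging

verbose_logger = logging.getLogger("litellm-example")  # hypothetical logger name
logging.basicConfig(level=logging.INFO)  # a typical production default

# Before the fix: emitted for every successful call at INFO level.
verbose_logger.info("Logging Details LiteLLM-Success Call")

# After the fix: silent unless debug verbosity is enabled explicitly,
# e.g. logging.basicConfig(level=logging.DEBUG) or litellm's own debug switch.
verbose_logger.debug("Logging Details LiteLLM-Success Call")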
@@ -1252,6 +1252,9 @@ class Logging:
                 kwargs[k] = v
+            # this only logs streaming once, complete_streaming_response exists i.e when stream ends
+            if self.stream:
+                verbose_logger.debug(
+                    f"is complete_streaming_response in kwargs: {kwargs.get('complete_streaming_response', None)}"
+                )
             if "complete_streaming_response" not in kwargs:
                 break
         else:
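The second hunk gates the complete_streaming_response check behind self.stream and documents why: for streaming calls, success should be logged exactly once, after the stream has ended and complete_streaming_response has been attached to kwargs. A rough, self-contained sketch of that log-once pattern (the function, parameters, and callback list below are simplified stand-ins for Logging.success_handler, not litellm's API):

import logging

verbose_logger = logging.getLogger("litellm-example")  # hypothetical logger name

def run_success_callbacks(kwargs: dict, stream: bool, callbacks: list) -> None:
    # Hypothetical condensation of the callback loop in Logging.success_handler.
    for callback in callbacks:
        if stream:
            verbose_logger.debug(
                "is complete_streaming_response in kwargs: %s",
                kwargs.get("complete_streaming_response", None),
            )
            # complete_streaming_response only exists once the stream ends;
            # bailing out here means a stream is logged once, not per chunk.
            if "complete_streaming_response" not in kwargs:
                break
        callback(kwargs)

Guarding the debug line itself behind if self.stream also keeps non-streaming calls from emitting an irrelevant message.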