mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-24 18:24:20 +00:00
fix(utils.py): handle original_response being a json
parent 9a4a96f46e
commit 712f89b4f1
1 changed file with 2 additions and 0 deletions
@@ -956,6 +956,8 @@ class Logging:
     ):
         # Log the exact result from the LLM API, for streaming - log the type of response received
         litellm.error_logs["POST_CALL"] = locals()
+        if isinstance(original_response, dict):
+            original_response = json.dumps(original_response)
         try:
             self.model_call_details["input"] = input
             self.model_call_details["api_key"] = api_key
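The added guard normalizes original_response before the logging code runs: when a caller passes an already-parsed dict instead of a raw response string, json.dumps converts it to a JSON string so the rest of post_call can treat the value uniformly as text. A minimal standalone sketch of that normalization (the helper name and sample payloads are illustrative, not part of litellm):

import json

def normalize_original_response(original_response):
    # Illustrative helper (not litellm's API): mirror the added guard so that
    # dict responses are serialized to a JSON string before being logged.
    if isinstance(original_response, dict):
        original_response = json.dumps(original_response)
    return original_response

# A parsed provider payload (a dict) becomes a JSON string ...
print(normalize_original_response({"id": "chatcmpl-123", "choices": []}))
# ... while responses that are already strings pass through unchanged.
print(normalize_original_response('{"id": "chatcmpl-123", "choices": []}'))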