(fix) proxy - log response before model_dump_json
parent d1865591aa
commit 40b9f1dcb1

1 changed file with 2 additions and 2 deletions
@@ -386,13 +386,13 @@ class OpenAIChatCompletion(BaseLLM):
             response = await openai_aclient.chat.completions.create(
                 **data, timeout=timeout
             )
-            stringified_response = response.model_dump_json()
             logging_obj.post_call(
                 input=data["messages"],
                 api_key=api_key,
-                original_response=stringified_response,
+                original_response=response,
                 additional_args={"complete_input_dict": data},
             )
+            stringified_response = response.model_dump_json()
             return convert_to_model_response_object(
                 response_object=json.loads(stringified_response),
                 model_response_object=model_response,
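In short: `logging_obj.post_call` now receives the raw response object rather than its JSON string, and `model_dump_json()` runs only after logging, so the logger sees the original object and, presumably, a serialization error can no longer prevent the call from being logged. A minimal sketch of this pattern follows; it is not litellm's actual code, and it assumes pydantic v2 for `model_dump_json()`, with `Response` and `Logger` as hypothetical stand-ins for the OpenAI response model and litellm's `logging_obj`:

# A minimal sketch of the ordering this commit adopts, not litellm's code.
# Assumptions: pydantic v2 for model_dump_json(); Response and Logger are
# hypothetical stand-ins for the OpenAI response model and logging_obj.
import json

from pydantic import BaseModel


class Response(BaseModel):
    id: str
    content: str


class Logger:
    def post_call(self, original_response: Response) -> None:
        # Receives the raw object; a callback may serialize it itself.
        print(f"logged: {original_response!r}")


logging_obj = Logger()
response = Response(id="chatcmpl-123", content="hello")

# After the fix: log the raw object first, then serialize for the return path.
logging_obj.post_call(original_response=response)
stringified_response = response.model_dump_json()
print(json.loads(stringified_response)["content"])  # -> hello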