forked from phoenix/litellm-mirror
fix(promptlayer.py): fixing promptlayer logging integration
commit 9d8f872f38 (parent e0a4cf4a52)
2 changed files with 11 additions and 3 deletions
```diff
@@ -680,7 +680,9 @@ class Logging:
                 complete_streaming_response = litellm.stream_chunk_builder(self.streaming_chunks)
             else:
                 self.streaming_chunks.append(result)
+        elif isinstance(result, OpenAIObject):
+            result = result.model_dump()
 
         if complete_streaming_response:
             self.model_call_details["complete_streaming_response"] = complete_streaming_response
 
```
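For context, this hunk reassembles buffered stream chunks into one complete response before it is handed to logging integrations such as PromptLayer, and converts `OpenAIObject` results to plain dicts so they serialize cleanly. Below is a minimal sketch of the same pattern outside the `Logging` class, assuming litellm's public `stream_chunk_builder` helper and a pydantic-style `model_dump()` on the rebuilt response; the model name and prompt are illustrative, not taken from the commit:

```python
import litellm

# Stream a completion and buffer every chunk (mirrors the
# Logging.streaming_chunks list in the patched code).
chunks = []
response = litellm.completion(
    model="gpt-3.5-turbo",  # illustrative model choice
    messages=[{"role": "user", "content": "Say hi"}],
    stream=True,
)
for chunk in response:
    chunks.append(chunk)

# Once the stream ends, rebuild a single complete response object
# from the buffered chunks, as the diff does on the final chunk.
complete_streaming_response = litellm.stream_chunk_builder(chunks)

# Logging callbacks like PromptLayer expect a plain dict rather than a
# response object, so dump it before logging (the commit applies the
# same conversion to non-streaming OpenAIObject results).
payload = complete_streaming_response.model_dump()
print(payload["choices"][0]["message"]["content"])
```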