forked from phoenix/litellm-mirror
openai streaming
This commit is contained in:
parent 80fa61cc0e
commit 648330ce64
1 changed file with 2 additions and 1 deletion
```diff
@@ -1625,7 +1625,8 @@ class CustomStreamWrapper:
             completion_obj["content"] = self.handle_openai_text_completion_chunk(chunk)
         else: # openai chat/azure models
             chunk = next(self.completion_stream)
-            completion_obj['content'] = chunk['choices']['delta']
+            return chunk
+
         # LOGGING
         self.logging_obj.post_call(completion_obj["content"])
         # return this for all models
```
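For context, the change appears to make the openai chat/azure branch hand back the raw streaming chunk instead of repackaging its delta into `completion_obj`. Below is a minimal sketch of how a caller might consume such a stream, assuming each chunk follows the OpenAI chat-completions streaming shape (`{"choices": [{"delta": {"content": ...}}]}`); the helper name `print_stream` and the usage lines are illustrative, not part of litellm.

```python
def print_stream(completion_stream):
    """Accumulate and print streamed content as it arrives."""
    full_text = []
    for chunk in completion_stream:
        # Raw OpenAI streaming chunks carry new text under choices[0]["delta"]["content"];
        # the final chunk usually has an empty delta, so default to "".
        delta = chunk["choices"][0].get("delta", {})
        piece = delta.get("content") or ""
        if piece:
            full_text.append(piece)
            print(piece, end="", flush=True)
    return "".join(full_text)

# Illustrative usage (assumes litellm's completion API with stream=True):
#   response_stream = litellm.completion(model="gpt-3.5-turbo", messages=msgs, stream=True)
#   text = print_stream(response_stream)
```

Note that newer OpenAI and litellm clients yield typed objects rather than plain dicts, so the access would become `chunk.choices[0].delta.content` there; the dict-style access above mirrors the code in this commit.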