fix streaming error

Krrish Dholakia 2023-09-15 15:20:46 -07:00
parent b966baa1f0
commit 34c676a8ec
8 changed files with 10 additions and 8 deletions


@@ -2451,7 +2451,11 @@ class CustomStreamWrapper:
def __next__(self):
try:
+<<<<<<< HEAD
completion_obj = {"content": "", "role": ""} # default to role being assistant
+=======
+completion_obj = {"content": ""} # default to role being assistant
+>>>>>>> 31d771b (fix streaming error)
if self.model in litellm.anthropic_models:
chunk = next(self.completion_stream)
completion_obj["content"] = self.handle_anthropic_chunk(chunk)
@@ -2496,15 +2500,12 @@ class CustomStreamWrapper:
else: # openai chat/azure models
chunk = next(self.completion_stream)
completion_obj["content"] = chunk["choices"][0]["delta"]["content"]
completion_obj["role"] = chunk["choices"][0]["delta"]["role"]
# return chunk # open ai returns finish_reason, we should just return the openai chunk
#completion_obj["content"] = self.handle_openai_chat_completion_chunk(chunk)
# LOGGING
threading.Thread(target=self.logging_obj.success_handler, args=(completion_obj,)).start()
# return this for all models
model_response = ModelResponse(stream=True)
-model_response.choices[0].delta = completion_obj
+model_response.choices[0].delta.content = completion_obj["content"]
return model_response
except StopIteration:
raise StopIteration
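The failure this hunk most plausibly fixes: in OpenAI-style streaming, the delta carries a "role" key only on the first chunk, so unconditionally indexing chunk["choices"][0]["delta"]["role"] raises KeyError from the second chunk on. A small synthetic reproduction (the chunk data below is fabricated for illustration):

# Synthetic openai-style stream: "role" appears only in the first delta.
chunks = [
    {"choices": [{"delta": {"role": "assistant", "content": ""}}]},
    {"choices": [{"delta": {"content": "Hi"}}]},      # no "role" key
    {"choices": [{"delta": {"content": " there"}}]},  # no "role" key
]

for chunk in chunks:
    delta = chunk["choices"][0]["delta"]
    # old behavior: delta["role"] -> KeyError on the second chunk
    # fixed behavior: read only the key every chunk is guaranteed to carry
    print(repr(delta.get("content", "")))

Writing model_response.choices[0].delta.content, instead of replacing the whole delta with a plain dict, also keeps the delta a typed object, so downstream attribute access like response.choices[0].delta.content continues to work.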