fix streaming error

This commit is contained in:
Krrish Dholakia 2023-09-15 15:20:46 -07:00
parent b966baa1f0
commit 34c676a8ec
8 changed files with 10 additions and 8 deletions

BIN
dist/litellm-0.1.644-py3-none-any.whl vendored Normal file

Binary file not shown.

BIN
dist/litellm-0.1.644.tar.gz vendored Normal file

Binary file not shown.

View file

@@ -121,13 +121,14 @@ def test_openai_chat_completion_call():
print(chunk)
if "content" in chunk["choices"][0]["delta"]:
complete_response += chunk["choices"][0]["delta"]["content"]
if complete_response == "":
print(f'complete_chunk: {complete_response}')
if complete_response.strip() == "":
raise Exception("Empty response received")
except:
print(f"error occurred: {traceback.format_exc()}")
pass
# test_openai_chat_completion_call()
test_openai_chat_completion_call()
async def completion_call():
try:
response = completion(

View file

@@ -2451,7 +2451,11 @@ class CustomStreamWrapper:
def __next__(self):
try:
completion_obj = {"content": ""} # default to role being assistant
if self.model in litellm.anthropic_models:
chunk = next(self.completion_stream)
completion_obj["content"] = self.handle_anthropic_chunk(chunk)
@@ -2496,15 +2500,12 @@ class CustomStreamWrapper:
else: # openai chat/azure models
chunk = next(self.completion_stream)
completion_obj["content"] = chunk["choices"][0]["delta"]["content"]
completion_obj["role"] = chunk["choices"][0]["delta"]["role"]
# return chunk # open ai returns finish_reason, we should just return the openai chunk
#completion_obj["content"] = self.handle_openai_chat_completion_chunk(chunk)
# LOGGING
threading.Thread(target=self.logging_obj.success_handler, args=(completion_obj,)).start()
# return this for all models
model_response = ModelResponse(stream=True)
model_response.choices[0].delta = completion_obj
model_response.choices[0].delta.content = completion_obj["content"]
return model_response
except StopIteration:
raise StopIteration

View file

@@ -1,6 +1,6 @@
[tool.poetry]
name = "litellm"
version = "0.1.644"
version = "0.1.645"
description = "Library to easily interface with LLM API providers"
authors = ["BerriAI"]
license = "MIT License"