forked from phoenix/litellm-mirror
fix streaming error
parent b966baa1f0
commit 34c676a8ec
8 changed files with 10 additions and 8 deletions
BIN  dist/litellm-0.1.644-py3-none-any.whl  vendored  Normal file
Binary file not shown.
BIN  dist/litellm-0.1.644.tar.gz  vendored  Normal file
Binary file not shown.
@@ -121,13 +121,14 @@ def test_openai_chat_completion_call():
             print(chunk)
             if "content" in chunk["choices"][0]["delta"]:
                 complete_response += chunk["choices"][0]["delta"]["content"]
-        if complete_response == "":
+        print(f'complete_chunk: {complete_response}')
+        if complete_response.strip() == "":
             raise Exception("Empty response received")
     except:
         print(f"error occurred: {traceback.format_exc()}")
         pass

-# test_openai_chat_completion_call()
+test_openai_chat_completion_call()
 async def completion_call():
     try:
         response = completion(
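Note on the test change: switching the emptiness check from == "" to .strip() == "" also catches streams that emit only whitespace. A minimal standalone sketch of the difference (the value of complete_response is hypothetical):

    # hypothetical accumulated output from a stream that sent only whitespace
    complete_response = " \n "
    assert not (complete_response == "")    # old check misses whitespace-only output
    assert complete_response.strip() == ""  # new check treats it as empty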
@@ -2451,7 +2451,11 @@ class CustomStreamWrapper:

     def __next__(self):
         try:
+<<<<<<< HEAD
             completion_obj = {"content": "", "role": ""} # default to role being assistant
+=======
+            completion_obj = {"content": ""} # default to role being assistant
+>>>>>>> 31d771b (fix streaming error)
             if self.model in litellm.anthropic_models:
                 chunk = next(self.completion_stream)
                 completion_obj["content"] = self.handle_anthropic_chunk(chunk)
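Note that this hunk commits literal merge-conflict markers (<<<<<<< HEAD ... >>>>>>> 31d771b) into the source; Python cannot parse these, so importing the module in this state would raise a SyntaxError. Going by the commit message, the intended resolution appears to be the lower branch — a single default without the empty role field. A sketch of that resolution (an assumption, not something the diff itself confirms):

    # apparent intended resolution -- an assumption based on the commit message:
    # default the streamed delta to empty content only, with no empty "role" key
    completion_obj = {"content": ""}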
@@ -2496,15 +2500,12 @@ class CustomStreamWrapper:
             else: # openai chat/azure models
                 chunk = next(self.completion_stream)
                 completion_obj["content"] = chunk["choices"][0]["delta"]["content"]
-                completion_obj["role"] = chunk["choices"][0]["delta"]["role"]
-                # return chunk # open ai returns finish_reason, we should just return the openai chunk

-                #completion_obj["content"] = self.handle_openai_chat_completion_chunk(chunk)
             # LOGGING
             threading.Thread(target=self.logging_obj.success_handler, args=(completion_obj,)).start()
             # return this for all models
             model_response = ModelResponse(stream=True)
-            model_response.choices[0].delta = completion_obj
+            model_response.choices[0].delta.content = completion_obj["content"]
             return model_response
         except StopIteration:
             raise StopIteration
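The substantive fix is on the last changed line: the wrapper now copies only the text onto the existing delta object instead of replacing the whole delta with a plain dict. A minimal sketch of why that matters for callers, using a hypothetical stand-in class rather than litellm's real delta type:

    # Hypothetical stand-in for the delta object inside ModelResponse
    # (illustration only; not litellm's actual class).
    class Delta:
        def __init__(self):
            self.content = ""
            self.role = "assistant"

    completion_obj = {"content": "hi"}

    # Old behavior: the delta slot becomes a plain dict, so downstream
    # attribute access (response.choices[0].delta.content) raises AttributeError.
    delta = completion_obj
    try:
        delta.content
    except AttributeError:
        pass  # dicts expose keys, not attributes

    # New behavior: mutate the existing object; attribute access keeps working.
    delta = Delta()
    delta.content = completion_obj["content"]
    assert delta.content == "hi"

This would also explain why the explicit completion_obj["role"] assignment could be dropped in the hunk above, presuming the delta object carries its own role default.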
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "litellm"
-version = "0.1.644"
+version = "0.1.645"
 description = "Library to easily interface with LLM API providers"
 authors = ["BerriAI"]
 license = "MIT License"