Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-26 19:24:27 +00:00)
fix azure streaming
This commit is contained in:
parent
78cd8d6e27
commit
a07251a514
3 changed files with 7 additions and 4 deletions
Binary file not shown.
@@ -1801,7 +1801,10 @@ class CustomStreamWrapper:
             raise ValueError(f"Unable to parse response. Original response: {chunk}")
 
     def handle_openai_chat_completion_chunk(self, chunk):
-        return chunk["choices"][0]["delta"]["content"]
+        try:
+            return chunk["choices"][0]["delta"]["content"]
+        except:
+            return ""
 
     def handle_baseten_chunk(self, chunk):
         try:
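For context, a small self-contained sketch (not the library's actual code) of why guarding the delta access matters for Azure streaming: streamed chat chunks do not always carry a "content" key, since the first chunk typically holds only the role and the final chunk has an empty delta, so the unguarded lookup could raise KeyError mid-stream. The example chunks below are illustrative assumptions, not taken from the commit.

def handle_openai_chat_completion_chunk(chunk):
    # Guarded accessor in the spirit of the change above; the commit uses a bare
    # except, this sketch narrows it to KeyError/IndexError. Returning "" keeps
    # the stream alive when a chunk carries no text.
    try:
        return chunk["choices"][0]["delta"]["content"]
    except (KeyError, IndexError):
        return ""

# Hypothetical chunks shaped like an Azure/OpenAI chat-completions stream:
chunks = [
    {"choices": [{"delta": {"role": "assistant"}}]},  # no "content" yet
    {"choices": [{"delta": {"content": "Hello"}}]},
    {"choices": [{"delta": {}}]},                     # terminal chunk, empty delta
]
print("".join(handle_openai_chat_completion_chunk(c) for c in chunks))  # prints "Hello"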
@@ -1869,12 +1872,12 @@ class CustomStreamWrapper:
             else: # openai chat/azure models
                 chunk = next(self.completion_stream)
                 completion_obj["content"] = self.handle_openai_chat_completion_chunk(chunk)
 
             # LOGGING
             threading.Thread(target=self.logging_obj.success_handler, args=(completion_obj,)).start()
             # return this for all models
             return {"choices": [{"delta": completion_obj}]}
-        except:
+        except Exception as e:
             raise StopIteration
 
     async def __anext__(self):
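The second hunk only narrows the handler from a bare except: to except Exception as e:. A toy wrapper, assuming nothing about litellm beyond what the hunk shows, illustrates the effect: KeyboardInterrupt and SystemExit are no longer swallowed, and the caught error is bound to e so it could be inspected or logged before the stream is ended.

class TinyStreamWrapper:
    # Illustrative stand-in for the streaming wrapper; the class name and the
    # plain-string stream are assumptions made for this sketch only.
    def __init__(self, completion_stream):
        self.completion_stream = iter(completion_stream)

    def __iter__(self):
        return self

    def __next__(self):
        try:
            text = next(self.completion_stream)
            return {"choices": [{"delta": {"content": text}}]}
        except Exception as e:   # was a bare `except:` before this commit
            raise StopIteration  # any error (or stream exhaustion) ends iteration

for piece in TinyStreamWrapper(["Hel", "lo"]):
    print(piece["choices"][0]["delta"]["content"], end="")
print()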
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "litellm"
-version = "0.1.517"
+version = "0.1.518"
 description = "Library to easily interface with LLM API providers"
 authors = ["BerriAI"]
 license = "MIT License"