Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-26 11:14:04 +00:00)
fix(utils.py): fix azure streaming logic
parent fa8a9568aa
commit b38c09c87f

2 changed files with 7 additions and 9 deletions
The first hunk updates test_completion_azure_stream_special_char: the prompt now asks for an <xml> tag, and the reassembled stream is printed before the assertion.

@@ -222,14 +222,12 @@ tools_schema = [
 def test_completion_azure_stream_special_char():
     litellm.set_verbose = True
-    messages = [
-        {"role": "user", "content": "Respond with the '<' sign and nothing else."}
-    ]
+    messages = [{"role": "user", "content": "hi. respond with the <xml> tag only"}]
     response = completion(model="azure/chatgpt-v-2", messages=messages, stream=True)
     response_str = ""
     for part in response:
         response_str += part.choices[0].delta.content or ""
+    print(f"response_str: {response_str}")
     assert len(response_str) > 0
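For context, the test exercises litellm's ordinary streaming pattern against an Azure deployment. The sketch below is a standalone version of that pattern, reconstructed from the hunk above; it assumes litellm is installed, that Azure credentials are configured the way litellm expects (environment variables such as AZURE_API_KEY / AZURE_API_BASE / AZURE_API_VERSION are an assumption here, not something this commit establishes), and that a deployment reachable as `azure/chatgpt-v-2` exists.

```python
# Standalone sketch of the streaming pattern the test exercises.
# Assumptions (not guaranteed by this commit): litellm is installed, Azure
# credentials are exported (e.g. AZURE_API_KEY / AZURE_API_BASE /
# AZURE_API_VERSION), and "chatgpt-v-2" is a deployment you actually have.
import litellm
from litellm import completion

litellm.set_verbose = True  # emit request/response debug logs

messages = [{"role": "user", "content": "hi. respond with the <xml> tag only"}]

# stream=True returns an iterator of incremental chunks instead of one response
response = completion(model="azure/chatgpt-v-2", messages=messages, stream=True)

response_str = ""
for part in response:
    # each chunk carries a delta; content may be None (e.g. on the final chunk)
    response_str += part.choices[0].delta.content or ""

print(f"response_str: {response_str}")
assert len(response_str) > 0  # after this fix, the leading "<" is not swallowed
```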
The second hunk, in CustomStreamWrapper, un-comments the provider guard so the special-token hold logic only applies to Hugging Face and SageMaker streams.

@@ -8842,11 +8842,11 @@ class CustomStreamWrapper:
         Output parse <s> / </s> special tokens for sagemaker + hf streaming.
         """
         hold = False
-        # if (
-        #     self.custom_llm_provider != "huggingface"
-        #     and self.custom_llm_provider != "sagemaker"
-        # ):
-        #     return hold, chunk
+        if (
+            self.custom_llm_provider != "huggingface"
+            and self.custom_llm_provider != "sagemaker"
+        ):
+            return hold, chunk

         if finish_reason:
             for token in self.special_tokens:
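The special-token handling exists so that <s> / </s> sentinels emitted by Hugging Face and SageMaker streams can be stripped, which requires holding back a chunk that might be the start of such a sentinel. With the guard commented out, that hold logic also ran for Azure, so a chunk beginning with "<" could be held back instead of emitted; restoring the early return lets non-HF/SageMaker chunks pass through untouched. The following is a minimal, self-contained sketch of that hold-and-check technique, not litellm's actual implementation: the function name, token list, and (hold, chunk) return convention mirror the hunk, but the body is simplified for illustration.

```python
# Illustrative sketch of "hold" logic for special-token filtering in a stream.
# NOT litellm's implementation -- a simplified stand-in that mirrors the shape
# of the hunk above: skip the check entirely for providers that never emit
# <s>/</s> sentinels, otherwise strip complete sentinels and hold chunks that
# could be a sentinel prefix.
from typing import Tuple

SPECIAL_TOKENS = ["<s>", "</s>"]  # sentinels seen in some HF / SageMaker streams


def check_special_tokens(chunk: str, custom_llm_provider: str) -> Tuple[bool, str]:
    """Return (hold, chunk); hold=True means buffer the chunk instead of emitting it."""
    hold = False

    # Providers such as Azure never emit these sentinels, so their chunks must
    # pass through untouched -- this is the early return the commit restores.
    if custom_llm_provider not in ("huggingface", "sagemaker"):
        return hold, chunk

    for token in SPECIAL_TOKENS:
        if token in chunk:
            # a complete sentinel: strip it from the emitted text
            chunk = chunk.replace(token, "")
        elif chunk and token.startswith(chunk):
            # the chunk could be the prefix of a sentinel (e.g. "<" or "</"),
            # so hold it until the next chunk disambiguates
            hold = True
    return hold, chunk


# Why Azure streaming broke: a chunk that is just "<" is a valid prefix of both
# sentinels, so without the early return it would be held even for Azure.
print(check_special_tokens("<", "azure"))        # -> (False, '<')
print(check_special_tokens("<", "huggingface"))  # -> (True, '<')
```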