forked from phoenix/litellm-mirror
fix(utils.py): fix azure streaming logic
This commit is contained in:
parent
fa8a9568aa
commit
b38c09c87f
2 changed files with 7 additions and 9 deletions
|
@@ -222,14 +222,12 @@ tools_schema = [
|
|||
|
||||
def test_completion_azure_stream_special_char():
    """Stream an Azure chat completion and assert special characters ('<...>')
    survive streaming.

    Regression test: with verbose logging on, request a reply containing an
    angle-bracket tag, concatenate all streamed delta chunks, and assert the
    assembled response is non-empty (i.e. the '<' content was not dropped by
    the streaming parser).
    """
    litellm.set_verbose = True
    # Note: the earlier duplicate `messages` assignment was dead code
    # (immediately overwritten) and has been removed.
    messages = [{"role": "user", "content": "hi. respond with the <xml> tag only"}]
    response = completion(model="azure/chatgpt-v-2", messages=messages, stream=True)

    # Reassemble the full text from streamed chunks; `delta.content` may be
    # None on some chunks (e.g. the final one), hence the `or ""` guard.
    response_str = ""
    for part in response:
        response_str += part.choices[0].delta.content or ""

    print(f"response_str: {response_str}")
    assert len(response_str) > 0
|
||||
|
||||
|
||||
|
|
|
@@ -8842,11 +8842,11 @@ class CustomStreamWrapper:
|
|||
Output parse <s> / </s> special tokens for sagemaker + hf streaming.
|
||||
"""
|
||||
hold = False
|
||||
# if (
|
||||
# self.custom_llm_provider != "huggingface"
|
||||
# and self.custom_llm_provider != "sagemaker"
|
||||
# ):
|
||||
# return hold, chunk
|
||||
if (
|
||||
self.custom_llm_provider != "huggingface"
|
||||
and self.custom_llm_provider != "sagemaker"
|
||||
):
|
||||
return hold, chunk
|
||||
|
||||
if finish_reason:
|
||||
for token in self.special_tokens:
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue