mirror of
https://github.com/BerriAI/litellm.git
synced 2025-04-24 18:24:20 +00:00
fix(utils.py): remove special characters from streaming output
This commit is contained in:
parent
a2f2fd3841
commit
6e7e409615
2 changed files with 60 additions and 34 deletions
|
@ -36,7 +36,7 @@ def test_completion_custom_provider_model_name():
|
|||
pytest.fail(f"Error occurred: {e}")
|
||||
|
||||
|
||||
test_completion_custom_provider_model_name()
|
||||
# test_completion_custom_provider_model_name()
|
||||
|
||||
|
||||
def test_completion_claude():
|
||||
|
@ -221,34 +221,32 @@ def test_get_hf_task_for_model():
|
|||
# ################### Hugging Face TGI models ########################
|
||||
# # TGI model
|
||||
# # this is a TGI model https://huggingface.co/glaiveai/glaive-coder-7b
|
||||
# def hf_test_completion_tgi():
|
||||
# litellm.set_verbose=True
|
||||
# try:
|
||||
# response = litellm.completion(
|
||||
# model="huggingface/mistralai/Mistral-7B-Instruct-v0.1",
|
||||
# messages=[{ "content": "Hello, how are you?","role": "user"}],
|
||||
# api_base="https://3kk3h56912qga4-80.proxy.runpod.net",
|
||||
# )
|
||||
# # Add any assertions here to check the response
|
||||
# print(response)
|
||||
# except Exception as e:
|
||||
# pytest.fail(f"Error occurred: {e}")
|
||||
def hf_test_completion_tgi():
    """Smoke-test a non-streaming chat completion against a Hugging Face
    TGI-hosted model (zephyr-7b-beta); fails the test on any exception."""
    # litellm.set_verbose=True
    try:
        result = completion(
            model="huggingface/HuggingFaceH4/zephyr-7b-beta",
            messages=[{"content": "Hello, how are you?", "role": "user"}],
        )
        # Add any assertions here to check the response
        print(result)
    except Exception as err:
        pytest.fail(f"Error occurred: {err}")
|
||||
# hf_test_completion_tgi()
|
||||
|
||||
# def hf_test_completion_tgi_stream():
|
||||
# try:
|
||||
# response = litellm.completion(
|
||||
# model="huggingface/glaiveai/glaive-coder-7b",
|
||||
# messages=[{ "content": "Hello, how are you?","role": "user"}],
|
||||
# api_base="https://wjiegasee9bmqke2.us-east-1.aws.endpoints.huggingface.cloud",
|
||||
# stream=True
|
||||
# )
|
||||
# # Add any assertions here to check the response
|
||||
# print(response)
|
||||
# for chunk in response:
|
||||
# print(chunk)
|
||||
# except Exception as e:
|
||||
# pytest.fail(f"Error occurred: {e}")
|
||||
def hf_test_completion_tgi_stream():
    """Smoke-test a streaming chat completion against a Hugging Face
    TGI-hosted model (zephyr-7b-beta), printing each delta chunk;
    fails the test on any exception."""
    try:
        stream = completion(
            model="huggingface/HuggingFaceH4/zephyr-7b-beta",
            messages=[{"content": "Hello, how are you?", "role": "user"}],
            stream=True,
        )
        # Add any assertions here to check the response
        print(stream)
        # NOTE(review): assumes every chunk carries choices[0].delta.content —
        # a terminal chunk without it would raise KeyError; verify against
        # litellm's streaming chunk schema.
        for part in stream:
            print(part["choices"][0]["delta"]["content"])
    except Exception as err:
        pytest.fail(f"Error occurred: {err}")
|
||||
# hf_test_completion_tgi_stream()
|
||||
|
||||
# ################### Hugging Face Conversational models ########################
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue