mirror of
https://github.com/BerriAI/litellm.git
synced 2025-04-25 02:34:29 +00:00
working petals implementation
This commit is contained in:
parent
f336dafd04
commit
93a0316ab7
3 changed files with 20 additions and 23 deletions
|
@ -214,15 +214,14 @@ def test_completion_together_ai_stream():
|
|||
pytest.fail(f"Error occurred: {e}")
|
||||
|
||||
|
||||
# def test_petals():
#     model_name = "stabilityai/StableBeluga2"
#     try:
#         response = completion(model=model_name, messages=messages, custom_llm_provider="petals")
#         # Add any assertions here to check the response
#         print(response)
#     except Exception as e:
#         pytest.fail(f"Error occurred: {e}")
# # test_petals()
|
||||
def test_petals():
    """Smoke-test completion() against the Petals provider.

    Issues a real request to the StableBeluga2 model via the "petals"
    custom provider and fails the test if any exception is raised.
    """
    target_model = "stabilityai/StableBeluga2"
    try:
        result = completion(
            model=target_model,
            messages=messages,
            custom_llm_provider="petals",
            force_timeout=120,
        )
        # Add any assertions here to check the response
        print(result)
    except Exception as e:
        pytest.fail(f"Error occurred: {e}")
|
||||
|
||||
# def test_baseten_falcon_7bcompletion():
|
||||
# model_name = "qvv0xeq"
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue