test(test_completion.py): fix test

Krrish Dholakia 2024-02-03 21:30:45 -08:00
parent ef0171e063
commit 3a19c8b600


@@ -37,11 +37,11 @@ def test_completion_custom_provider_model_name():
     try:
         litellm.cache = None
         response = completion(
-            model="together_ai/mistralai/Mistral-7B-Instruct-v0.1",
+            model="together_ai/mistralai/Mixtral-8x7B-Instruct-v0.1",
             messages=messages,
             logger_fn=logger_fn,
         )
-        # Add any assertions here to check the response
+        # Add any assertions here to check the, response
         print(response)
         print(response["choices"][0]["finish_reason"])
     except Exception as e:
@@ -1369,7 +1369,7 @@ def test_customprompt_together_ai():
         print(litellm.success_callback)
         print(litellm._async_success_callback)
         response = completion(
-            model="together_ai/mistralai/Mistral-7B-Instruct-v0.1",
+            model="together_ai/mistralai/Mixtral-8x7B-Instruct-v0.1",
             messages=messages,
             roles={
                 "system": {
@@ -1998,7 +1998,7 @@ def test_completion_together_ai_stream():
     messages = [{"content": user_message, "role": "user"}]
     try:
         response = completion(
-            model="together_ai/mistralai/Mistral-7B-Instruct-v0.1",
+            model="together_ai/mistralai/Mixtral-8x7B-Instruct-v0.1",
             messages=messages,
             stream=True,
             max_tokens=5,
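
All three hunks make the same substitution: the Together AI test model moves from mistralai/Mistral-7B-Instruct-v0.1 to mistralai/Mixtral-8x7B-Instruct-v0.1. Below is a minimal sketch of the call pattern these tests exercise, outside the test harness. It assumes a TOGETHERAI_API_KEY is set in the environment, and the prompt text is illustrative, since the messages variable is defined outside these hunks.

import litellm
from litellm import completion

litellm.cache = None  # the tests reset the cache before calling (see the first hunk)

response = completion(
    model="together_ai/mistralai/Mixtral-8x7B-Instruct-v0.1",
    messages=[{"content": "Hello, how are you?", "role": "user"}],  # illustrative prompt
    max_tokens=5,
)
print(response)
print(response["choices"][0]["finish_reason"])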