test batch_completion_models

This commit is contained in:
ishaan-jaff 2023-09-19 13:29:40 -07:00
parent 2c30855389
commit a230d08795


@@ -1,15 +1,15 @@
# #### What this tests ####
# # This tests calling batch_completions by running 100 messages together
#### What this tests ####
# This tests calling batch_completions by running 100 messages together
# import sys, os
# import traceback
# import pytest
# sys.path.insert(
#     0, os.path.abspath("../..")
# ) # Adds the parent directory to the system path
# from openai.error import Timeout
# import litellm
# from litellm import batch_completion
import sys, os
import traceback
import pytest
sys.path.insert(
    0, os.path.abspath("../..")
) # Adds the parent directory to the system path
from openai.error import Timeout
import litellm
from litellm import batch_completion, batch_completion_models, completion
# litellm.set_verbose=True
# def test_batch_completions():
@@ -23,3 +23,26 @@
# pass
# except Exception as e:
# pytest.fail(f"An error occurred: {e}")
def test_batch_completions_models():
    try:
        result = batch_completion_models(
            models=["gpt-3.5-turbo", "claude-instant-1.2", "command-nightly"],
            messages=[{"role": "user", "content": "Hey, how's it going"}]
        )
        print(result)
    except Exception as e:
        pytest.fail(f"An error occurred: {e}")
# test_batch_completions_models()
# def test_batch_completions():
#     try:
#         result = completion(
#             model=["gpt-3.5-turbo", "claude-instant-1.2", "command-nightly"],
#             messages=[{"role": "user", "content": "Hey, how's it going"}]
#         )
#         print(result)
#     except Exception as e:
#         pytest.fail(f"An error occurred: {e}")
# test_batch_completions()
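
For context, a minimal sketch of how the new helper could be called outside pytest. It assumes, per litellm's batch-completion docs of this period, that batch_completion_models fans the same messages out to the listed models and returns the fastest successful response (batch_completion, by contrast, sends many message lists to a single model). The model names, environment-variable comment, and response indexing below are illustrative, not part of this commit.

# Illustrative only (not part of this commit): send one prompt to several
# providers and use whichever model responds first.
# Assumes provider API keys are set in the environment, e.g. OPENAI_API_KEY,
# ANTHROPIC_API_KEY, COHERE_API_KEY.
from litellm import batch_completion_models

messages = [{"role": "user", "content": "Hey, how's it going"}]

# Assumption: the fastest successful response among the listed models is returned.
response = batch_completion_models(
    models=["gpt-3.5-turbo", "claude-instant-1.2", "command-nightly"],
    messages=messages,
)
print(response["choices"][0]["message"]["content"])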