Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-26 03:04:13 +00:00.
updates to tests
This commit is contained in:
parent
ab35fdb635
commit
4d17c57373
3 changed files with 29 additions and 29 deletions
|
@ -2,34 +2,34 @@
|
|||
# This tests error logging (with custom user functions) for the `completion` + `embedding` endpoints without callbacks (i.e. slack, posthog, etc. not set)
|
||||
# Requirements: Remove any env keys you have related to slack/posthog/etc. + anthropic api key (cause an exception)
|
||||
|
||||
import sys, os
|
||||
import traceback
|
||||
# import sys, os
|
||||
# import traceback
|
||||
|
||||
sys.path.insert(
|
||||
0, os.path.abspath("../..")
|
||||
) # Adds the parent directory to the system path
|
||||
import litellm
|
||||
from litellm import embedding, completion
|
||||
from infisical import InfisicalClient
|
||||
import pytest
|
||||
# sys.path.insert(
|
||||
# 0, os.path.abspath("../..")
|
||||
# ) # Adds the parent directory to the system path
|
||||
# import litellm
|
||||
# from litellm import embedding, completion
|
||||
# from infisical import InfisicalClient
|
||||
# import pytest
|
||||
|
||||
# --- Secret-manager + fixture setup -----------------------------------------
# The Infisical token MUST be present in the environment; a missing key raises
# KeyError immediately, which is preferable to running the tests with a silent
# misconfiguration.
infisical_token = os.environ["INFISICAL_TOKEN"]

# Route litellm's secret lookups through Infisical for the duration of this
# test module.  NOTE(review): tests below reset this to None on teardown.
litellm.secret_manager_client = InfisicalClient(token=infisical_token)

# Shared chat fixture used by the completion tests.
user_message = "Hello, whats the weather in San Francisco??"
messages = [{"content": user_message, "role": "user"}]
|
||||
|
||||
|
||||
def test_completion_openai():
    """Smoke-test `completion` against gpt-3.5-turbo with the Infisical
    secret manager active.

    Any exception is converted into a pytest failure.  The secret-manager
    client is always detached afterwards so later tests do not inherit it.
    """
    try:
        response = completion(model="gpt-3.5-turbo", messages=messages)
        # Add any assertions here to check the response
        print(response)
    except Exception as e:
        pytest.fail(f"Error occurred: {e}")
    finally:
        # `finally` guarantees the reset runs on both the success path and
        # the failure path (pytest.fail raises, so a statement placed after
        # the try block would be skipped on failure).  The original code
        # duplicated this reset in the except branch and after the try.
        litellm.secret_manager_client = None
|
||||
# Invoke directly so this file can also be run as a plain script, outside of
# the pytest collector.
test_completion_openai()
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue