Merge branch 'main' into litellm_fix_azure_function_calling_streaming

Krish Dholakia 2024-02-22 22:36:38 -08:00 committed by GitHub
commit dd4439b6a8
23 changed files with 869 additions and 173 deletions


@@ -7,10 +7,9 @@ sys.path.insert(0, os.path.abspath("../.."))
from litellm import completion
import litellm
litellm.success_callback = ["promptlayer"]
litellm.set_verbose = True
import time
import pytest
import time
# def test_promptlayer_logging():
#     try:
@@ -39,11 +38,16 @@ import time
# test_promptlayer_logging()
@pytest.mark.skip(
    reason="this works locally but fails on ci/cd since ci/cd is not reading the stdout correctly"
)
def test_promptlayer_logging_with_metadata():
    try:
        # Redirect stdout
        old_stdout = sys.stdout
        sys.stdout = new_stdout = io.StringIO()
        litellm.set_verbose = True
        litellm.success_callback = ["promptlayer"]
        response = completion(
            model="gpt-3.5-turbo",
@@ -58,15 +62,43 @@ def test_promptlayer_logging_with_metadata():
        sys.stdout = old_stdout
        output = new_stdout.getvalue().strip()
        print(output)
        if "LiteLLM: Prompt Layer Logging: success" not in output:
            raise Exception("Required log message not found!")
        assert "Prompt Layer Logging: success" in output
    except Exception as e:
        print(e)
        pytest.fail(f"Error occurred: {e}")
# test_promptlayer_logging_with_metadata()
@pytest.mark.skip(
    reason="this works locally but fails on ci/cd since ci/cd is not reading the stdout correctly"
)
def test_promptlayer_logging_with_metadata_tags():
    try:
        # Redirect stdout
        litellm.set_verbose = True
        litellm.success_callback = ["promptlayer"]
        old_stdout = sys.stdout
        sys.stdout = new_stdout = io.StringIO()
        response = completion(
            model="gpt-3.5-turbo",
            messages=[{"role": "user", "content": "Hi 👋 - i'm ai21"}],
            temperature=0.2,
            max_tokens=20,
            metadata={"model": "ai21", "pl_tags": ["env:dev"]},
            mock_response="this is a mock response",
        )
        # Restore stdout
        time.sleep(1)
        sys.stdout = old_stdout
        output = new_stdout.getvalue().strip()
        print(output)
        assert "Prompt Layer Logging: success" in output
    except Exception as e:
        pytest.fail(f"Error occurred: {e}")
# def test_chat_openai():
#     try: