fix(main.py): removing print_verbose

This commit is contained in:
Krrish Dholakia 2023-10-30 20:37:12 -07:00
parent c04cad208c
commit 3743893e76
3 changed files with 22 additions and 65 deletions

View file

@@ -416,9 +416,6 @@ def completion(
or None # default - https://github.com/openai/openai-python/blob/284c1799070c723c6a553337134148a7ab088dd8/openai/util.py#L105
)
# set API KEY
print_verbose(
f"api_key: {api_key}; dynamic_api_key: {dynamic_api_key}; litellm.api_key: {litellm.api_key}; litellm.openai_key: {litellm.openai_key}; os.environ['OPENAI_API_KEY']: {os.environ['OPENAI_API_KEY']}"
)
api_key = (
api_key or
dynamic_api_key or # allows us to read env variables for compatible openai api's like perplexity

View file

@@ -1,30 +1,30 @@
#### What this tests ####
# This tests if logging to the helicone integration actually works
# #### What this tests ####
# # This tests if logging to the helicone integration actually works
import sys, os
import traceback
import pytest
# import sys, os
# import traceback
# import pytest
sys.path.insert(
0, os.path.abspath("../..")
) # Adds the parent directory to the system path
import litellm
from litellm import embedding, completion
# sys.path.insert(
# 0, os.path.abspath("../..")
# ) # Adds the parent directory to the system path
# import litellm
# from litellm import embedding, completion
litellm.success_callback = ["helicone"]
# litellm.success_callback = ["helicone"]
litellm.set_verbose = True
# litellm.set_verbose = True
user_message = "Hello, how are you?"
messages = [{"content": user_message, "role": "user"}]
# user_message = "Hello, how are you?"
# messages = [{"content": user_message, "role": "user"}]
# openai call
response = completion(
model="gpt-3.5-turbo", messages=[{"role": "user", "content": "Hi 👋 - i'm openai"}]
)
# # openai call
# response = completion(
# model="gpt-3.5-turbo", messages=[{"role": "user", "content": "Hi 👋 - i'm openai"}]
# )
# cohere call
response = completion(
model="command-nightly", messages=[{"role": "user", "content": "Hi 👋 - i'm cohere"}]
)
# # cohere call
# response = completion(
# model="command-nightly", messages=[{"role": "user", "content": "Hi 👋 - i'm cohere"}]
# )

View file

@@ -1,40 +0,0 @@
import sys
import os
import io

sys.path.insert(0, os.path.abspath('../..'))
from litellm import completion
import litellm

litellm.failure_callback = ["sentry"]

import time


def test_exception_tracking():
    """Force an auth failure and verify it is reported to Sentry.

    Temporarily swaps OPENAI_API_KEY for an invalid value so the
    completion call raises; litellm.failure_callback (["sentry"]) is
    expected to capture the exception. The real key is always restored.
    """
    print('expect this to fail and log to sentry')
    litellm.set_verbose = True
    old_api_key = os.environ["OPENAI_API_KEY"]
    os.environ["OPENAI_API_KEY"] = "ss"  # deliberately invalid key
    try:
        response = completion(
            model="gpt-3.5-turbo",
            messages=[{
                "role": "user",
                "content": "Hi 👋 - i'm claude"
            }],
            max_tokens=10,
            temperature=0.2,
        )
        print(response)
    except Exception as e:
        # Expected path: the bad key makes the call raise; the sentry
        # failure callback should have been triggered by litellm.
        print("got_exception")
        print(e)
    finally:
        # Restore the real key unconditionally. The original restored it
        # separately in both the try and except paths, so a raise that is
        # not an Exception subclass (e.g. KeyboardInterrupt) — or an error
        # inside the handler — would leave the dummy key set and poison
        # every later test in the process.
        os.environ["OPENAI_API_KEY"] = old_api_key


test_exception_tracking()