Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-25 02:34:29 +00:00
Update test_proxy_api.py
parent e676bfdbed
commit 20ece924a7

1 changed file with 12 additions and 12 deletions
test_proxy_api.py
@@ -1,15 +1,15 @@
-import sys, os
-import traceback
-sys.path.insert(0, os.path.abspath('../..')) # Adds the parent directory to the system path
-import litellm
-from litellm import embedding, completion
+# import sys, os
+# import traceback
+# sys.path.insert(0, os.path.abspath('../..')) # Adds the parent directory to the system path
+# import litellm
+# from litellm import embedding, completion
 
-litellm.api_base = "https://oai.hconeai.com/v1"
-litellm.headers = {"Helicone-Auth": f"Bearer {os.getenv('HELICONE_API_KEY')}"}
+# litellm.api_base = "https://oai.hconeai.com/v1"
+# litellm.headers = {"Helicone-Auth": f"Bearer {os.getenv('HELICONE_API_KEY')}"}
 
-response = litellm.completion(
-    model="gpt-3.5-turbo",
-    messages=[{"role": "user", "content": "how does a court case get to the Supreme Court?"}]
-)
+# response = litellm.completion(
+#     model="gpt-3.5-turbo",
+#     messages=[{"role": "user", "content": "how does a court case get to the Supreme Court?"}]
+# )
 
-print(response)
+# print(response)
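For context, the code toggled by this diff points litellm at the Helicone proxy and issues a single chat completion. Below is a minimal sketch of running that same call outside the test, guarded so it only fires when the required API keys are present; the guard and the standalone-script framing are illustrative assumptions, not part of this commit.

# Minimal sketch (not part of the commit): run the proxied completion only
# when the required keys are available. Assumes OPENAI_API_KEY and
# HELICONE_API_KEY are set to valid values.
import os

import litellm
from litellm import completion

if os.getenv("OPENAI_API_KEY") and os.getenv("HELICONE_API_KEY"):
    # Route OpenAI-compatible traffic through the Helicone proxy, as the test does.
    litellm.api_base = "https://oai.hconeai.com/v1"
    litellm.headers = {"Helicone-Auth": f"Bearer {os.getenv('HELICONE_API_KEY')}"}

    response = completion(
        model="gpt-3.5-turbo",
        messages=[{"role": "user", "content": "how does a court case get to the Supreme Court?"}],
    )
    print(response)
else:
    print("Skipping proxied completion: OPENAI_API_KEY and/or HELICONE_API_KEY not set.")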