From bf872fc14b1b64feab8caa8e2e59370a5c85c79a Mon Sep 17 00:00:00 2001
From: Ishaan Jaff
Date: Wed, 2 Aug 2023 20:52:01 -0700
Subject: [PATCH] Update test_proxy_api.py

---
 litellm/tests/test_proxy_api.py | 24 ++++++++++++------------
 1 file changed, 12 insertions(+), 12 deletions(-)

diff --git a/litellm/tests/test_proxy_api.py b/litellm/tests/test_proxy_api.py
index 348e83e88..c0ce19ab8 100644
--- a/litellm/tests/test_proxy_api.py
+++ b/litellm/tests/test_proxy_api.py
@@ -1,15 +1,15 @@
-import sys, os
-import traceback
-sys.path.insert(0, os.path.abspath('../..')) # Adds the parent directory to the system path
-import litellm
-from litellm import embedding, completion
+# import sys, os
+# import traceback
+# sys.path.insert(0, os.path.abspath('../..')) # Adds the parent directory to the system path
+# import litellm
+# from litellm import embedding, completion
 
-litellm.api_base = "https://oai.hconeai.com/v1"
-litellm.headers = {"Helicone-Auth": f"Bearer {os.getenv('HELICONE_API_KEY')}"}
+# litellm.api_base = "https://oai.hconeai.com/v1"
+# litellm.headers = {"Helicone-Auth": f"Bearer {os.getenv('HELICONE_API_KEY')}"}
 
-response = litellm.completion(
-    model="gpt-3.5-turbo",
-    messages=[{"role": "user", "content": "how does a court case get to the Supreme Court?"}]
-)
+# response = litellm.completion(
+#     model="gpt-3.5-turbo",
+#     messages=[{"role": "user", "content": "how does a court case get to the Supreme Court?"}]
+# )
 
-print(response)
\ No newline at end of file
+# print(response)