diff --git a/dist/litellm-0.1.452-py3-none-any.whl b/dist/litellm-0.1.452-py3-none-any.whl
new file mode 100644
index 000000000..82f8cf309
Binary files /dev/null and b/dist/litellm-0.1.452-py3-none-any.whl differ
diff --git a/dist/litellm-0.1.452.tar.gz b/dist/litellm-0.1.452.tar.gz
new file mode 100644
index 000000000..17c5607dd
Binary files /dev/null and b/dist/litellm-0.1.452.tar.gz differ
diff --git a/litellm/__pycache__/__init__.cpython-311.pyc b/litellm/__pycache__/__init__.cpython-311.pyc
index e1fe4f2c9..95b0e56e2 100644
Binary files a/litellm/__pycache__/__init__.cpython-311.pyc and b/litellm/__pycache__/__init__.cpython-311.pyc differ
diff --git a/litellm/__pycache__/main.cpython-311.pyc b/litellm/__pycache__/main.cpython-311.pyc
index 05ed66e1b..0460bceff 100644
Binary files a/litellm/__pycache__/main.cpython-311.pyc and b/litellm/__pycache__/main.cpython-311.pyc differ
diff --git a/litellm/__pycache__/utils.cpython-311.pyc b/litellm/__pycache__/utils.cpython-311.pyc
index 4ff3865e4..9b7eb9f69 100644
Binary files a/litellm/__pycache__/utils.cpython-311.pyc and b/litellm/__pycache__/utils.cpython-311.pyc differ
diff --git a/litellm/integrations/__pycache__/supabase.cpython-311.pyc b/litellm/integrations/__pycache__/supabase.cpython-311.pyc
index 26cdede8f..ffb30cda5 100644
Binary files a/litellm/integrations/__pycache__/supabase.cpython-311.pyc and b/litellm/integrations/__pycache__/supabase.cpython-311.pyc differ
diff --git a/litellm/tests/test_litedebugger_integration.py b/litellm/tests/test_litedebugger_integration.py
index 689c8e1e7..79b143624 100644
--- a/litellm/tests/test_litedebugger_integration.py
+++ b/litellm/tests/test_litedebugger_integration.py
@@ -1,22 +1,22 @@
-# #### What this tests ####
-# # This tests if logging to the litedebugger integration actually works
-# # pytest mistakes intentional bad calls as failed tests -> [TODO] fix this
-# import sys, os
-# import traceback
-# import pytest
+#### What this tests ####
+# This tests if logging to the litedebugger integration actually works
+# pytest mistakes intentional bad calls as failed tests -> [TODO] fix this
+import sys, os
+import traceback
+import pytest
 
-# sys.path.insert(0, os.path.abspath('../..')) # Adds the parent directory to the system path
-# import litellm
-# from litellm import embedding, completion
+sys.path.insert(0, os.path.abspath('../..')) # Adds the parent directory to the system path
+import litellm
+from litellm import embedding, completion
 
-# litellm.email = "krrish@berri.ai"
+litellm.email = "krrish@berri.ai"
 
-# user_message = "Hello, how are you?"
-# messages = [{ "content": user_message,"role": "user"}]
+user_message = "Hello, how are you?"
+messages = [{ "content": user_message,"role": "user"}]
 
-# #openai call
-# response = completion(model="gpt-3.5-turbo", messages=[{"role": "user", "content": "Hi 👋 - i'm openai"}])
+#openai call
+response = completion(model="gpt-3.5-turbo", messages=[{"role": "user", "content": "Hi 👋 - i'm openai"}])
 
-# #bad request call
-# response = completion(model="chatgpt-test", messages=[{"role": "user", "content": "Hi 👋 - i'm a bad request"}])
+#bad request call
+response = completion(model="chatgpt-test", messages=[{"role": "user", "content": "Hi 👋 - i'm a bad request"}])
 
diff --git a/litellm/utils.py b/litellm/utils.py
index a979c5621..a9dd3a20f 100644
--- a/litellm/utils.py
+++ b/litellm/utils.py
@@ -1022,7 +1022,6 @@ def handle_success(args, kwargs, result, start_time, end_time):
         )
         pass
 
-
 def get_model_list():
     global last_fetched_at
     # if user is using hosted product -> get their updated model list - refresh every 5 minutes
@@ -1036,24 +1035,15 @@ def get_model_list():
             # make the api call
             last_fetched_at = time.time()
             print(f"last_fetched_at: {last_fetched_at}")
-            response = requests.get(
-                url="http://api.litellm.ai/get_model_list",
-                headers={"content-type": "application/json"},
-                data=json.dumps({"user_email": user_email}),
-            )
+            response = requests.post(url="http://api.litellm.ai/get_model_list", headers={"content-type": "application/json"}, data=json.dumps({"user_email": user_email}))
             print_verbose(f"get_model_list response: {response.text}")
             data = response.json()
             # update model list
             model_list = data["model_list"]
-            # set environment variables
-            env_dict = data["model_keys"]
-            for key, value in env_dict.items():
-                os.environ[key] = value
-        litellm.model_list = (
-            model_list  # update the user's current litellm model list
-        )
-    # return litellm model list by default
-    return litellm.model_list
+            return model_list
+        return None
+    # return None by default
+    return None
 
 
 def acreate(*args, **kwargs):  ## Thin client to handle the acreate langchain call
diff --git a/poetry.lock b/poetry.lock
index 3edf35bb2..1c438e0eb 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -423,13 +423,13 @@ files = [
 
 [[package]]
 name = "openai"
-version = "0.27.8"
+version = "0.27.9"
 description = "Python client library for the OpenAI API"
 optional = false
 python-versions = ">=3.7.1"
 files = [
-    {file = "openai-0.27.8-py3-none-any.whl", hash = "sha256:e0a7c2f7da26bdbe5354b03c6d4b82a2f34bd4458c7a17ae1a7092c3e397e03c"},
-    {file = "openai-0.27.8.tar.gz", hash = "sha256:2483095c7db1eee274cebac79e315a986c4e55207bb4fa7b82d185b3a2ed9536"},
+    {file = "openai-0.27.9-py3-none-any.whl", hash = "sha256:6a3cf8e276d1a6262b50562fbc0cba7967cfebb78ed827d375986b48fdad6475"},
+    {file = "openai-0.27.9.tar.gz", hash = "sha256:b687761c82f5ebb6f61efc791b2083d2d068277b94802d4d1369efe39851813d"},
 ]
 
 [package.dependencies]
diff --git a/pyproject.toml b/pyproject.toml
index 687a4e0be..3314cd11e 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "litellm"
-version = "0.1.452"
+version = "0.1.454"
 description = "Library to easily interface with LLM API providers"
 authors = ["BerriAI"]
 license = "MIT License"