diff --git a/litellm/__pycache__/main.cpython-311.pyc b/litellm/__pycache__/main.cpython-311.pyc
index 847e9e656..64cdd8e27 100644
Binary files a/litellm/__pycache__/main.cpython-311.pyc and b/litellm/__pycache__/main.cpython-311.pyc differ
diff --git a/litellm/main.py b/litellm/main.py
index 135f81ce0..f79323236 100644
--- a/litellm/main.py
+++ b/litellm/main.py
@@ -234,10 +234,11 @@ def completion(
             custom_llm_provider = model.split("/", 1)[0]
             model = model.split("/", 1)[1]
         model, custom_llm_provider = get_llm_provider(model=model, custom_llm_provider=custom_llm_provider)
-        api_key = get_api_key(llm_provider=custom_llm_provider, dynamic_api_key=api_key) # get the api key from the environment if required for the model
-        if api_key and "sk-litellm" in api_key:
+        model_api_key = get_api_key(llm_provider=custom_llm_provider, dynamic_api_key=api_key) # get the api key from the environment if required for the model
+        if model_api_key and "sk-litellm" in model_api_key:
             api_base = "https://proxy.litellm.ai"
             custom_llm_provider = "openai"
+            api_key = model_api_key
         # check if user passed in any of the OpenAI optional params
         optional_params = get_optional_params(
             functions=functions,
diff --git a/pyproject.toml b/pyproject.toml
index 012df37bf..20f9cffdc 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "litellm"
-version = "0.1.694"
+version = "0.1.695"
 description = "Library to easily interface with LLM API providers"
 authors = ["BerriAI"]
 license = "MIT License"
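
For context, a minimal Python sketch of the control flow this patch produces. The helper names below (lookup_api_key, resolve_routing) are illustrative stand-ins rather than litellm's actual API; the point is that the environment lookup now lands in a separate variable, so a caller-supplied api_key is only overwritten when a litellm proxy key ("sk-litellm") is detected.

import os

# Illustrative stand-in for get_api_key(): prefer the dynamically passed key,
# otherwise fall back to the provider's environment variable. Hypothetical helper.
def lookup_api_key(llm_provider, dynamic_api_key=None):
    if dynamic_api_key:
        return dynamic_api_key
    return os.environ.get(f"{llm_provider.upper()}_API_KEY")

# Hypothetical wrapper mirroring the patched logic in litellm/main.py above.
def resolve_routing(custom_llm_provider, api_key=None, api_base=None):
    # Store the lookup result in its own variable instead of reassigning api_key.
    model_api_key = lookup_api_key(custom_llm_provider, dynamic_api_key=api_key)
    if model_api_key and "sk-litellm" in model_api_key:
        # A litellm proxy key was found: reroute to the proxy and adopt that key.
        api_base = "https://proxy.litellm.ai"
        custom_llm_provider = "openai"
        api_key = model_api_key
    # Otherwise the caller's api_key (possibly None) is left untouched.
    return custom_llm_provider, api_key, api_base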