bug fix for litellm proxy implementation

This commit is contained in:
Krrish Dholakia 2023-09-18 12:54:56 -07:00
parent 8c809db567
commit f134de1287
3 changed files with 4 additions and 3 deletions

View file

@@ -234,10 +234,11 @@ def completion(
         custom_llm_provider = model.split("/", 1)[0]
         model = model.split("/", 1)[1]
     model, custom_llm_provider = get_llm_provider(model=model, custom_llm_provider=custom_llm_provider)
-    api_key = get_api_key(llm_provider=custom_llm_provider, dynamic_api_key=api_key) # get the api key from the environment if required for the model
-    if api_key and "sk-litellm" in api_key:
+    model_api_key = get_api_key(llm_provider=custom_llm_provider, dynamic_api_key=api_key) # get the api key from the environment if required for the model
+    if model_api_key and "sk-litellm" in model_api_key:
         api_base = "https://proxy.litellm.ai"
         custom_llm_provider = "openai"
+        api_key = model_api_key
     # check if user passed in any of the OpenAI optional params
     optional_params = get_optional_params(
         functions=functions,

View file

@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "litellm"
-version = "0.1.694"
+version = "0.1.695"
 description = "Library to easily interface with LLM API providers"
 authors = ["BerriAI"]
 license = "MIT License"