Merge pull request #5478 from BerriAI/litellm_Add_ai21

[Feat] Add AI21 /chat API

Commit dc1b0ec182 (9 changed files with 412 additions and 8 deletions).

The hunks below come from litellm/utils.py: get_optional_params, get_supported_openai_params, get_llm_provider, validate_environment, and convert_to_model_response_object all learn about the new ai21_chat provider.
```diff
@@ -2887,6 +2887,7 @@ def get_optional_params(
             and custom_llm_provider != "groq"
             and custom_llm_provider != "nvidia_nim"
             and custom_llm_provider != "cerebras"
+            and custom_llm_provider != "ai21_chat"
             and custom_llm_provider != "volcengine"
             and custom_llm_provider != "deepseek"
             and custom_llm_provider != "codestral"
```
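This guard lists the providers whose OpenAI-style parameters are translated by a dedicated config class rather than the generic drop-unsupported-params path; adding `ai21_chat` here keeps AI21 chat requests on the dedicated path. A minimal usage sketch, assuming an `AI21_API_KEY` in the environment and `jamba-1.5-mini` as an illustrative model name:

```python
# Minimal usage sketch (model name and availability are assumptions):
# with "ai21_chat" excluded from the generic guard above, these OpenAI-style
# params are mapped by AI21's config class instead of being dropped.
import litellm  # requires AI21_API_KEY to be set in the environment

response = litellm.completion(
    model="ai21_chat/jamba-1.5-mini",  # assumed example model name
    messages=[{"role": "user", "content": "Say hello in one sentence."}],
    temperature=0.7,
    max_tokens=64,
)
print(response.choices[0].message.content)
```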
```diff
@@ -3656,6 +3657,16 @@ def get_optional_params(
             optional_params=optional_params,
             model=model,
         )
+    elif custom_llm_provider == "ai21_chat":
+        supported_params = get_supported_openai_params(
+            model=model, custom_llm_provider=custom_llm_provider
+        )
+        _check_valid_arg(supported_params=supported_params)
+        optional_params = litellm.AI21ChatConfig().map_openai_params(
+            non_default_params=non_default_params,
+            optional_params=optional_params,
+            model=model,
+        )
     elif custom_llm_provider == "fireworks_ai":
         supported_params = get_supported_openai_params(
             model=model, custom_llm_provider=custom_llm_provider
```
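The dispatch above follows litellm's per-provider config pattern: look up the supported OpenAI params, validate the caller's arguments, then map them onto the provider's request shape. A rough sketch of that pattern, not the actual `AI21ChatConfig` source (the method names follow the calls visible in the diff; the bodies are assumptions):

```python
# Rough sketch of the per-provider config pattern (NOT the real AI21ChatConfig;
# method names follow the calls in the diff, the bodies are assumptions).
from typing import List


class ExampleChatConfig:
    def get_supported_openai_params(self, model: str) -> List[str]:
        # The OpenAI-style params this provider accepts.
        return ["max_tokens", "temperature", "top_p", "stop", "stream"]

    def map_openai_params(
        self, non_default_params: dict, optional_params: dict, model: str
    ) -> dict:
        # Copy only supported params onto the outgoing request dict.
        supported = self.get_supported_openai_params(model)
        for key, value in non_default_params.items():
            if key in supported:
                optional_params[key] = value
        return optional_params
```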
```diff
@@ -4283,6 +4294,8 @@ def get_supported_openai_params(
         return litellm.NvidiaNimConfig().get_supported_openai_params(model=model)
     elif custom_llm_provider == "cerebras":
         return litellm.CerebrasConfig().get_supported_openai_params(model=model)
+    elif custom_llm_provider == "ai21_chat":
+        return litellm.AI21ChatConfig().get_supported_openai_params(model=model)
     elif custom_llm_provider == "volcengine":
         return litellm.VolcEngineConfig().get_supported_openai_params(model=model)
     elif custom_llm_provider == "groq":
```
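With the new branch in place, callers can introspect what an AI21 chat model accepts before building a request. A hedged usage sketch (`jamba-1.5-large` is an assumed model name):

```python
from litellm import get_supported_openai_params

params = get_supported_openai_params(
    model="jamba-1.5-large", custom_llm_provider="ai21_chat"
)
print(params)  # a list of OpenAI-style param names, e.g. ["max_tokens", ...]
```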
```diff
@@ -4671,6 +4684,7 @@ def get_llm_provider(
         ):
             custom_llm_provider = model.split("/", 1)[0]
             model = model.split("/", 1)[1]
 
             if custom_llm_provider == "perplexity":
                 # perplexity is openai compatible, we just need to set this to custom_openai and have the api_base be https://api.perplexity.ai
                 api_base = api_base or get_secret("PERPLEXITY_API_BASE") or "https://api.perplexity.ai"  # type: ignore
```
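Note the `maxsplit=1`: only the first slash separates the provider prefix from the model name, so model IDs containing slashes survive intact. For example:

```python
# maxsplit=1 keeps any further slashes inside the model name:
model = "ai21_chat/org/jamba-1.5-mini"  # hypothetical prefixed name
provider, model_name = model.split("/", 1)
print(provider)    # ai21_chat
print(model_name)  # org/jamba-1.5-mini
```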
```diff
@@ -4717,6 +4731,16 @@ def get_llm_provider(
                     or "https://api.cerebras.ai/v1"
                 )  # type: ignore
                 dynamic_api_key = api_key or get_secret("CEREBRAS_API_KEY")
+            elif (custom_llm_provider == "ai21_chat") or (
+                custom_llm_provider == "ai21" and model in litellm.ai21_chat_models
+            ):
+                api_base = (
+                    api_base
+                    or get_secret("AI21_API_BASE")
+                    or "https://api.ai21.com/studio/v1"
+                )  # type: ignore
+                dynamic_api_key = api_key or get_secret("AI21_API_KEY")
+                custom_llm_provider = "ai21_chat"
             elif custom_llm_provider == "volcengine":
                 # volcengine is openai compatible, we just need to set this to custom_openai and have the api_base be https://api.endpoints.anyscale.com/v1
                 api_base = (
```
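The `or` chain gives a fixed resolution order: an explicit `api_base` argument wins, then the `AI21_API_BASE` secret, then the default studio endpoint. A minimal standalone sketch of the same order, using `os.environ` as a stand-in for litellm's `get_secret` (which can also read from secret managers):

```python
import os
from typing import Optional


def resolve_ai21_api_base(api_base: Optional[str]) -> str:
    # Same precedence as the diff: explicit argument > env var > default.
    return (
        api_base
        or os.environ.get("AI21_API_BASE")
        or "https://api.ai21.com/studio/v1"
    )


print(resolve_ai21_api_base(None))  # default, assuming AI21_API_BASE is unset
print(resolve_ai21_api_base("http://localhost:4000"))  # explicit value wins
```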
```diff
@@ -4870,6 +4894,9 @@ def get_llm_provider(
             elif endpoint == "https://api.cerebras.ai/v1":
                 custom_llm_provider = "cerebras"
                 dynamic_api_key = get_secret("CEREBRAS_API_KEY")
+            elif endpoint == "https://api.ai21.com/studio/v1":
+                custom_llm_provider = "ai21_chat"
+                dynamic_api_key = get_secret("AI21_API_KEY")
             elif endpoint == "https://codestral.mistral.ai/v1":
                 custom_llm_provider = "codestral"
                 dynamic_api_key = get_secret("CODESTRAL_API_KEY")
```
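This lets provider detection work from the endpoint alone, with no `ai21_chat/` prefix on the model. A hedged usage sketch, assuming `get_llm_provider` keeps its `(model, provider, dynamic_api_key, api_base)` return tuple and `jamba-1.5-mini` as an example model name:

```python
from litellm import get_llm_provider  # assumed public export

model, provider, dynamic_api_key, api_base = get_llm_provider(
    model="jamba-1.5-mini",  # assumed model name, no provider prefix
    api_base="https://api.ai21.com/studio/v1",
)
print(provider)  # expected: ai21_chat
```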
```diff
@@ -4953,6 +4980,14 @@ def get_llm_provider(
         ## ai21
         elif model in litellm.ai21_models:
             custom_llm_provider = "ai21"
+        elif model in litellm.ai21_chat_models:
+            custom_llm_provider = "ai21_chat"
+            api_base = (
+                api_base
+                or get_secret("AI21_API_BASE")
+                or "https://api.ai21.com/studio/v1"
+            )  # type: ignore
+            dynamic_api_key = api_key or get_secret("AI21_API_KEY")
         ## aleph_alpha
         elif model in litellm.aleph_alpha_models:
             custom_llm_provider = "aleph_alpha"
```
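Finally, a bare model name is resolved against the provider model lists, so legacy `/complete` models keep the `ai21` provider while Jamba-style chat models route to `ai21_chat` with the studio base URL filled in. A hedged sketch (model name assumed):

```python
from litellm import get_llm_provider  # assumed public export

model, provider, dynamic_api_key, api_base = get_llm_provider(
    model="jamba-1.5-mini"  # assumed chat model name, no provider prefix
)
print(provider)  # expected: ai21_chat
print(api_base)  # expected: https://api.ai21.com/studio/v1 (AI21_API_BASE unset)
```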
```diff
@@ -5800,6 +5835,11 @@ def validate_environment(
                 keys_in_environment = True
             else:
                 missing_keys.append("CEREBRAS_API_KEY")
+        elif custom_llm_provider == "ai21_chat":
+            if "AI21_API_KEY" in os.environ:
+                keys_in_environment = True
+            else:
+                missing_keys.append("AI21_API_KEY")
         elif custom_llm_provider == "volcengine":
             if "VOLCENGINE_API_KEY" in os.environ:
                 keys_in_environment = True
```
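The `validate_environment` branch lets callers check for the key up front instead of failing mid-request. A hedged usage sketch (model name assumed; the return shape mirrors the `keys_in_environment` / `missing_keys` fields set above):

```python
import litellm

check = litellm.validate_environment(model="ai21_chat/jamba-1.5-mini")  # assumed name
print(check)
# e.g. {"keys_in_environment": False, "missing_keys": ["AI21_API_KEY"]}
```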
```diff
@@ -6211,7 +6251,10 @@ def convert_to_model_response_object(
         if "model" in response_object:
             if model_response_object.model is None:
                 model_response_object.model = response_object["model"]
-            elif "/" in model_response_object.model:
+            elif (
+                "/" in model_response_object.model
+                and response_object["model"] is not None
+            ):
                 openai_compatible_provider = model_response_object.model.split("/")[
                     0
                 ]
```
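The extra `response_object["model"] is not None` condition matters because this branch goes on to rebuild the model string from the API response; presumably concatenating a provider prefix with `None` would raise a `TypeError`. A minimal illustration of the guarded pattern (the downstream concatenation is an assumption based on the surrounding context):

```python
# Illustration only; the concatenation mirrors what the surrounding code
# presumably does with openai_compatible_provider.
response_object = {"model": None}  # a provider response that omits the model
current = "ai21_chat/jamba-1.5-mini"  # assumed prefixed model name

if "/" in current and response_object["model"] is not None:
    provider = current.split("/")[0]
    current = provider + "/" + response_object["model"]
# With the None check, `current` keeps its value; without it, the branch would
# attempt `provider + "/" + None` and raise a TypeError.
print(current)  # ai21_chat/jamba-1.5-mini
```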