mirror of
https://github.com/BerriAI/litellm.git
synced 2025-04-27 11:43:54 +00:00
vo - init commit adding codestral API
This commit is contained in:
parent
a9649c0860
commit
1a30068f90
3 changed files with 56 additions and 0 deletions
|
@ -391,6 +391,8 @@ openai_compatible_endpoints: List = [
|
||||||
"api.endpoints.anyscale.com/v1",
|
"api.endpoints.anyscale.com/v1",
|
||||||
"api.deepinfra.com/v1/openai",
|
"api.deepinfra.com/v1/openai",
|
||||||
"api.mistral.ai/v1",
|
"api.mistral.ai/v1",
|
||||||
|
"codestral.mistral.ai/v1/chat/completions",
|
||||||
|
"codestral.mistral.ai/v1/fim/completions",
|
||||||
"api.groq.com/openai/v1",
|
"api.groq.com/openai/v1",
|
||||||
"api.deepseek.com/v1",
|
"api.deepseek.com/v1",
|
||||||
"api.together.xyz/v1",
|
"api.together.xyz/v1",
|
||||||
|
@ -401,6 +403,8 @@ openai_compatible_providers: List = [
|
||||||
"anyscale",
|
"anyscale",
|
||||||
"mistral",
|
"mistral",
|
||||||
"groq",
|
"groq",
|
||||||
|
"codestral",
|
||||||
|
"text-completion-codestral",
|
||||||
"deepseek",
|
"deepseek",
|
||||||
"deepinfra",
|
"deepinfra",
|
||||||
"perplexity",
|
"perplexity",
|
||||||
|
@ -627,6 +631,8 @@ provider_list: List = [
|
||||||
"anyscale",
|
"anyscale",
|
||||||
"mistral",
|
"mistral",
|
||||||
"groq",
|
"groq",
|
||||||
|
"codestral",
|
||||||
|
"text-completion-codestral",
|
||||||
"deepseek",
|
"deepseek",
|
||||||
"maritalk",
|
"maritalk",
|
||||||
"voyage",
|
"voyage",
|
||||||
|
|
|
@ -322,6 +322,8 @@ async def acompletion(
|
||||||
or custom_llm_provider == "deepinfra"
|
or custom_llm_provider == "deepinfra"
|
||||||
or custom_llm_provider == "perplexity"
|
or custom_llm_provider == "perplexity"
|
||||||
or custom_llm_provider == "groq"
|
or custom_llm_provider == "groq"
|
||||||
|
or custom_llm_provider == "codestral"
|
||||||
|
or custom_llm_provider == "text-completion-codestral"
|
||||||
or custom_llm_provider == "deepseek"
|
or custom_llm_provider == "deepseek"
|
||||||
or custom_llm_provider == "text-completion-openai"
|
or custom_llm_provider == "text-completion-openai"
|
||||||
or custom_llm_provider == "huggingface"
|
or custom_llm_provider == "huggingface"
|
||||||
|
@ -1046,6 +1048,8 @@ def completion(
|
||||||
or custom_llm_provider == "deepinfra"
|
or custom_llm_provider == "deepinfra"
|
||||||
or custom_llm_provider == "perplexity"
|
or custom_llm_provider == "perplexity"
|
||||||
or custom_llm_provider == "groq"
|
or custom_llm_provider == "groq"
|
||||||
|
or custom_llm_provider == "codestral"
|
||||||
|
or custom_llm_provider == "text-completion-codestral"
|
||||||
or custom_llm_provider == "deepseek"
|
or custom_llm_provider == "deepseek"
|
||||||
or custom_llm_provider == "anyscale"
|
or custom_llm_provider == "anyscale"
|
||||||
or custom_llm_provider == "mistral"
|
or custom_llm_provider == "mistral"
|
||||||
|
@ -3445,6 +3449,7 @@ async def atext_completion(*args, **kwargs):
|
||||||
or custom_llm_provider == "deepinfra"
|
or custom_llm_provider == "deepinfra"
|
||||||
or custom_llm_provider == "perplexity"
|
or custom_llm_provider == "perplexity"
|
||||||
or custom_llm_provider == "groq"
|
or custom_llm_provider == "groq"
|
||||||
|
or custom_llm_provider == "text-completion-codestral"
|
||||||
or custom_llm_provider == "deepseek"
|
or custom_llm_provider == "deepseek"
|
||||||
or custom_llm_provider == "fireworks_ai"
|
or custom_llm_provider == "fireworks_ai"
|
||||||
or custom_llm_provider == "text-completion-openai"
|
or custom_llm_provider == "text-completion-openai"
|
||||||
|
|
|
@ -2359,6 +2359,7 @@ def get_optional_params(
|
||||||
and custom_llm_provider != "together_ai"
|
and custom_llm_provider != "together_ai"
|
||||||
and custom_llm_provider != "groq"
|
and custom_llm_provider != "groq"
|
||||||
and custom_llm_provider != "deepseek"
|
and custom_llm_provider != "deepseek"
|
||||||
|
and custom_llm_provider != "codestral"
|
||||||
and custom_llm_provider != "mistral"
|
and custom_llm_provider != "mistral"
|
||||||
and custom_llm_provider != "anthropic"
|
and custom_llm_provider != "anthropic"
|
||||||
and custom_llm_provider != "cohere_chat"
|
and custom_llm_provider != "cohere_chat"
|
||||||
|
@ -3007,6 +3008,28 @@ def get_optional_params(
|
||||||
optional_params["response_format"] = response_format
|
optional_params["response_format"] = response_format
|
||||||
if seed is not None:
|
if seed is not None:
|
||||||
optional_params["seed"] = seed
|
optional_params["seed"] = seed
|
||||||
|
elif custom_llm_provider == "codestral":
|
||||||
|
# supported_params = get_supported_openai_params(
|
||||||
|
# model=model, custom_llm_provider=custom_llm_provider
|
||||||
|
# )
|
||||||
|
# _check_valid_arg(supported_params=supported_params)
|
||||||
|
# optional_params = litellm.DeepInfraConfig().map_openai_params(
|
||||||
|
# non_default_params=non_default_params,
|
||||||
|
# optional_params=optional_params,
|
||||||
|
# model=model,
|
||||||
|
# )
|
||||||
|
pass
|
||||||
|
elif custom_llm_provider == "text-completion-codestral":
|
||||||
|
# supported_params = get_supported_openai_params(
|
||||||
|
# model=model, custom_llm_provider=custom_llm_provider
|
||||||
|
# )
|
||||||
|
# _check_valid_arg(supported_params=supported_params)
|
||||||
|
# optional_params = litellm.DeepInfraConfig().map_openai_params(
|
||||||
|
# non_default_params=non_default_params,
|
||||||
|
# optional_params=optional_params,
|
||||||
|
# model=model,
|
||||||
|
# )
|
||||||
|
pass
|
||||||
|
|
||||||
elif custom_llm_provider == "deepseek":
|
elif custom_llm_provider == "deepseek":
|
||||||
supported_params = get_supported_openai_params(
|
supported_params = get_supported_openai_params(
|
||||||
|
@ -3867,6 +3890,14 @@ def get_llm_provider(
|
||||||
# groq is openai compatible, we just need to set this to custom_openai and have the api_base be https://api.groq.com/openai/v1
|
# groq is openai compatible, we just need to set this to custom_openai and have the api_base be https://api.groq.com/openai/v1
|
||||||
api_base = "https://api.groq.com/openai/v1"
|
api_base = "https://api.groq.com/openai/v1"
|
||||||
dynamic_api_key = get_secret("GROQ_API_KEY")
|
dynamic_api_key = get_secret("GROQ_API_KEY")
|
||||||
|
elif custom_llm_provider == "codestral":
|
||||||
|
# codestral is openai compatible, we just need to set this to custom_openai and have the api_base be https://codestral.mistral.ai/v1/chat/completions
|
||||||
|
api_base = "https://codestral.mistral.ai/v1/chat/completions"
|
||||||
|
dynamic_api_key = get_secret("CODESTRAL_API_KEY")
|
||||||
|
elif custom_llm_provider == "text-completion-codestral":
|
||||||
|
# codestral FIM is openai compatible, we just need to set this to custom_openai and have the api_base be https://codestral.mistral.ai/v1/fim/completions
|
||||||
|
api_base = "https://codestral.mistral.ai/v1/fim/completions"
|
||||||
|
dynamic_api_key = get_secret("CODESTRAL_API_KEY")
|
||||||
elif custom_llm_provider == "deepseek":
|
elif custom_llm_provider == "deepseek":
|
||||||
# deepseek is openai compatible, we just need to set this to custom_openai and have the api_base be https://api.deepseek.com/v1
|
# deepseek is openai compatible, we just need to set this to custom_openai and have the api_base be https://api.deepseek.com/v1
|
||||||
api_base = "https://api.deepseek.com/v1"
|
api_base = "https://api.deepseek.com/v1"
|
||||||
|
@ -3959,6 +3990,12 @@ def get_llm_provider(
|
||||||
elif endpoint == "api.groq.com/openai/v1":
|
elif endpoint == "api.groq.com/openai/v1":
|
||||||
custom_llm_provider = "groq"
|
custom_llm_provider = "groq"
|
||||||
dynamic_api_key = get_secret("GROQ_API_KEY")
|
dynamic_api_key = get_secret("GROQ_API_KEY")
|
||||||
|
elif endpoint == "https://codestral.mistral.ai/v1/chat/completions":
|
||||||
|
custom_llm_provider = "codestral"
|
||||||
|
dynamic_api_key = get_secret("CODESTRAL_API_KEY")
|
||||||
|
elif endpoint == "https://codestral.mistral.ai/v1/fim/completions":
|
||||||
|
custom_llm_provider = "text-completion-codestral"
|
||||||
|
dynamic_api_key = get_secret("CODESTRAL_API_KEY")
|
||||||
elif endpoint == "api.deepseek.com/v1":
|
elif endpoint == "api.deepseek.com/v1":
|
||||||
custom_llm_provider = "deepseek"
|
custom_llm_provider = "deepseek"
|
||||||
dynamic_api_key = get_secret("DEEPSEEK_API_KEY")
|
dynamic_api_key = get_secret("DEEPSEEK_API_KEY")
|
||||||
|
@ -4642,6 +4679,14 @@ def validate_environment(model: Optional[str] = None) -> dict:
|
||||||
keys_in_environment = True
|
keys_in_environment = True
|
||||||
else:
|
else:
|
||||||
missing_keys.append("GROQ_API_KEY")
|
missing_keys.append("GROQ_API_KEY")
|
||||||
|
elif (
|
||||||
|
custom_llm_provider == "codestral"
|
||||||
|
or custom_llm_provider == "text-completion-codestral"
|
||||||
|
):
|
||||||
|
if "CODESTRAL_API_KEY" in os.environ:
|
||||||
|
keys_in_environment = True
|
||||||
|
else:
|
||||||
|
missing_keys.append("CODESTRAL_API_KEY")
|
||||||
elif custom_llm_provider == "deepseek":
|
elif custom_llm_provider == "deepseek":
|
||||||
if "DEEPSEEK_API_KEY" in os.environ:
|
if "DEEPSEEK_API_KEY" in os.environ:
|
||||||
keys_in_environment = True
|
keys_in_environment = True
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue