From 1a30068f90eb7fef297016d58de03dc63d50d141 Mon Sep 17 00:00:00 2001
From: Ishaan Jaff
Date: Mon, 17 Jun 2024 11:05:24 -0700
Subject: [PATCH] v0 - init commit adding codestral API

---
 litellm/__init__.py |  6 ++++++
 litellm/main.py     |  5 +++++
 litellm/utils.py    | 45 +++++++++++++++++++++++++++++++++++++++++++++
 3 files changed, 56 insertions(+)

diff --git a/litellm/__init__.py b/litellm/__init__.py
index 353d7ac5b6..02e2c1f280 100644
--- a/litellm/__init__.py
+++ b/litellm/__init__.py
@@ -391,6 +391,8 @@ openai_compatible_endpoints: List = [
     "api.endpoints.anyscale.com/v1",
     "api.deepinfra.com/v1/openai",
     "api.mistral.ai/v1",
+    "codestral.mistral.ai/v1/chat/completions",
+    "codestral.mistral.ai/v1/fim/completions",
     "api.groq.com/openai/v1",
     "api.deepseek.com/v1",
     "api.together.xyz/v1",
@@ -401,6 +403,8 @@ openai_compatible_providers: List = [
     "anyscale",
     "mistral",
     "groq",
+    "codestral",
+    "text-completion-codestral",
     "deepseek",
     "deepinfra",
     "perplexity",
@@ -627,6 +631,8 @@ provider_list: List = [
     "anyscale",
     "mistral",
     "groq",
+    "codestral",
+    "text-completion-codestral",
     "deepseek",
     "maritalk",
     "voyage",
diff --git a/litellm/main.py b/litellm/main.py
index 77fe38fd2d..91d44833ec 100644
--- a/litellm/main.py
+++ b/litellm/main.py
@@ -322,6 +322,8 @@ async def acompletion(
         or custom_llm_provider == "deepinfra"
         or custom_llm_provider == "perplexity"
         or custom_llm_provider == "groq"
+        or custom_llm_provider == "codestral"
+        or custom_llm_provider == "text-completion-codestral"
         or custom_llm_provider == "deepseek"
         or custom_llm_provider == "text-completion-openai"
         or custom_llm_provider == "huggingface"
@@ -1046,6 +1048,8 @@ def completion(
         or custom_llm_provider == "deepinfra"
         or custom_llm_provider == "perplexity"
         or custom_llm_provider == "groq"
+        or custom_llm_provider == "codestral"
+        or custom_llm_provider == "text-completion-codestral"
         or custom_llm_provider == "deepseek"
         or custom_llm_provider == "anyscale"
         or custom_llm_provider == "mistral"
@@ -3445,6 +3449,7 @@ async def atext_completion(*args, **kwargs):
         or custom_llm_provider == "deepinfra"
         or custom_llm_provider == "perplexity"
         or custom_llm_provider == "groq"
+        or custom_llm_provider == "text-completion-codestral"
         or custom_llm_provider == "deepseek"
         or custom_llm_provider == "fireworks_ai"
         or custom_llm_provider == "text-completion-openai"
diff --git a/litellm/utils.py b/litellm/utils.py
index 6bc33d73d7..e1b4749171 100644
--- a/litellm/utils.py
+++ b/litellm/utils.py
@@ -2359,6 +2359,7 @@ def get_optional_params(
         and custom_llm_provider != "together_ai"
         and custom_llm_provider != "groq"
         and custom_llm_provider != "deepseek"
+        and custom_llm_provider != "codestral"
         and custom_llm_provider != "mistral"
         and custom_llm_provider != "anthropic"
         and custom_llm_provider != "cohere_chat"
@@ -3007,6 +3008,28 @@ def get_optional_params(
             optional_params["response_format"] = response_format
         if seed is not None:
             optional_params["seed"] = seed
+    elif custom_llm_provider == "codestral":
+        # supported_params = get_supported_openai_params(
+        #     model=model, custom_llm_provider=custom_llm_provider
+        # )
+        # _check_valid_arg(supported_params=supported_params)
+        # optional_params = litellm.DeepInfraConfig().map_openai_params(
+        #     non_default_params=non_default_params,
+        #     optional_params=optional_params,
+        #     model=model,
+        # )
+        pass
+    elif custom_llm_provider == "text-completion-codestral":
+        # supported_params = get_supported_openai_params(
+        #     model=model, custom_llm_provider=custom_llm_provider
+        # )
+        # _check_valid_arg(supported_params=supported_params)
+        # optional_params = litellm.DeepInfraConfig().map_openai_params(
+        #     non_default_params=non_default_params,
+        #     optional_params=optional_params,
+        #     model=model,
+        # )
+        pass
     elif custom_llm_provider == "deepseek":
         supported_params = get_supported_openai_params(
             model=model, custom_llm_provider=custom_llm_provider
@@ -3867,6 +3890,14 @@ def get_llm_provider(
             # groq is openai compatible, we just need to set this to custom_openai and have the api_base be https://api.groq.com/openai/v1
             api_base = "https://api.groq.com/openai/v1"
             dynamic_api_key = get_secret("GROQ_API_KEY")
+        elif custom_llm_provider == "codestral":
+            # codestral is openai compatible, we just need to set this to custom_openai and have the api_base be https://codestral.mistral.ai/v1/chat/completions
+            api_base = "https://codestral.mistral.ai/v1/chat/completions"
+            dynamic_api_key = get_secret("CODESTRAL_API_KEY")
+        elif custom_llm_provider == "text-completion-codestral":
+            # codestral FIM is openai compatible, we just need to set this to custom_openai and have the api_base be https://codestral.mistral.ai/v1/fim/completions
+            api_base = "https://codestral.mistral.ai/v1/fim/completions"
+            dynamic_api_key = get_secret("CODESTRAL_API_KEY")
         elif custom_llm_provider == "deepseek":
             # deepseek is openai compatible, we just need to set this to custom_openai and have the api_base be https://api.deepseek.com/v1
             api_base = "https://api.deepseek.com/v1"
             dynamic_api_key = get_secret("DEEPSEEK_API_KEY")
@@ -3959,6 +3990,12 @@ def get_llm_provider(
         elif endpoint == "api.groq.com/openai/v1":
             custom_llm_provider = "groq"
             dynamic_api_key = get_secret("GROQ_API_KEY")
+        elif endpoint == "codestral.mistral.ai/v1/chat/completions":
+            custom_llm_provider = "codestral"
+            dynamic_api_key = get_secret("CODESTRAL_API_KEY")
+        elif endpoint == "codestral.mistral.ai/v1/fim/completions":
+            custom_llm_provider = "text-completion-codestral"
+            dynamic_api_key = get_secret("CODESTRAL_API_KEY")
         elif endpoint == "api.deepseek.com/v1":
             custom_llm_provider = "deepseek"
             dynamic_api_key = get_secret("DEEPSEEK_API_KEY")
@@ -4642,6 +4679,14 @@ def validate_environment(model: Optional[str] = None) -> dict:
             keys_in_environment = True
         else:
             missing_keys.append("GROQ_API_KEY")
+    elif (
+        custom_llm_provider == "codestral"
+        or custom_llm_provider == "text-completion-codestral"
+    ):
+        if "CODESTRAL_API_KEY" in os.environ:
+            keys_in_environment = True
+        else:
+            missing_keys.append("CODESTRAL_API_KEY")
     elif custom_llm_provider == "deepseek":
         if "DEEPSEEK_API_KEY" in os.environ:
             keys_in_environment = True
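
Usage sketch (not part of the patch): with this routing in place, the two new providers should be reachable through litellm's normal entry points -- `completion()` for the chat route and `text_completion()` for the FIM route. A minimal example follows; "codestral-latest" is an assumed model name and the placeholder key is illustrative, so verify both against Mistral's Codestral docs.

    # Usage sketch -- NOT part of this patch. Assumes the routing added above:
    #   "codestral/<model>"                 -> codestral.mistral.ai/v1/chat/completions
    #   "text-completion-codestral/<model>" -> codestral.mistral.ai/v1/fim/completions
    # "codestral-latest" is an assumed model name; check Mistral's docs.
    import os

    import litellm

    os.environ["CODESTRAL_API_KEY"] = "..."  # placeholder; set a real key

    # Chat-style call against the /chat/completions route.
    response = litellm.completion(
        model="codestral/codestral-latest",
        messages=[{"role": "user", "content": "Write a hello world in Python."}],
    )
    print(response.choices[0].message.content)

    # Fill-in-the-middle (FIM) call against the /fim/completions route.
    fim = litellm.text_completion(
        model="text-completion-codestral/codestral-latest",
        prompt="def fibonacci(n):",
    )
    print(fim.choices[0].text)

    # validate_environment() (also touched by this patch) reports whether the
    # provider's key is present before any request is made.
    print(litellm.validate_environment(model="codestral/codestral-latest"))

Both calls go through the OpenAI-compatible code path, which is why the patch only needs to register endpoints, provider names, and the CODESTRAL_API_KEY lookup rather than a new handler.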