From 5f76f96e4d9354b31eb9fb93d1fa71dae1d84a72 Mon Sep 17 00:00:00 2001
From: Ishaan Jaff
Date: Mon, 17 Jun 2024 11:30:22 -0700
Subject: [PATCH] working chat, text for codestral

---
 litellm/tests/test_completion.py      | 24 ++++++++++++++++++++++++
 litellm/tests/test_text_completion.py | 18 ++++++++++++++++++
 litellm/utils.py                      | 12 ++++++------
 3 files changed, 48 insertions(+), 6 deletions(-)

diff --git a/litellm/tests/test_completion.py b/litellm/tests/test_completion.py
index 911446846..0786a45e9 100644
--- a/litellm/tests/test_completion.py
+++ b/litellm/tests/test_completion.py
@@ -817,6 +817,30 @@ def test_completion_mistral_api():
         pytest.fail(f"Error occurred: {e}")
 
 
+@pytest.mark.asyncio
+async def test_completion_codestral_chat_api():
+    try:
+        litellm.set_verbose = True
+        response = await litellm.acompletion(
+            model="codestral/codestral-latest",
+            max_tokens=5,
+            messages=[
+                {
+                    "role": "user",
+                    "content": "Hey, how's it going?",
+                }
+            ],
+        )
+        # Add any assertions here to check the response
+        print(response)
+
+        # cost = litellm.completion_cost(completion_response=response)
+        # print("cost to make mistral completion=", cost)
+        # assert cost > 0.0
+    except Exception as e:
+        pytest.fail(f"Error occurred: {e}")
+
+
 def test_completion_mistral_api_mistral_large_function_call():
     litellm.set_verbose = True
     tools = [
diff --git a/litellm/tests/test_text_completion.py b/litellm/tests/test_text_completion.py
index 65d5bcac2..5edfb1935 100644
--- a/litellm/tests/test_text_completion.py
+++ b/litellm/tests/test_text_completion.py
@@ -4076,3 +4076,21 @@ async def test_async_text_completion_chat_model_stream():
 
 
 # asyncio.run(test_async_text_completion_chat_model_stream())
+
+
+@pytest.mark.asyncio
+async def test_completion_codestral_fim_api():
+    try:
+        litellm.set_verbose = True
+        response = await litellm.atext_completion(
+            model="text-completion-codestral/codestral-2405",
+            prompt="def is_odd(n): \n return n % 2 == 1 \ndef test_is_odd():",
+        )
+        # Add any assertions here to check the response
+        print(response)
+
+        # cost = litellm.completion_cost(completion_response=response)
+        # print("cost to make mistral completion=", cost)
+        # assert cost > 0.0
+    except Exception as e:
+        pytest.fail(f"Error occurred: {e}")
diff --git a/litellm/utils.py b/litellm/utils.py
index e1b474917..c720a24cd 100644
--- a/litellm/utils.py
+++ b/litellm/utils.py
@@ -3891,12 +3891,12 @@ def get_llm_provider(
             api_base = "https://api.groq.com/openai/v1"
             dynamic_api_key = get_secret("GROQ_API_KEY")
         elif custom_llm_provider == "codestral":
-            # codestral is openai compatible, we just need to set this to custom_openai and have the api_base be https://codestral.mistral.ai/v1/chat/completions
-            api_base = "https://codestral.mistral.ai/v1/chat/completions"
+            # codestral is openai compatible, we just need to set this to custom_openai and have the api_base be https://codestral.mistral.ai/v1
+            api_base = "https://codestral.mistral.ai/v1"
             dynamic_api_key = get_secret("CODESTRAL_API_KEY")
         elif custom_llm_provider == "text-completion-codestral":
-            # codestral is openai compatible, we just need to set this to custom_openai and have the api_base be https://codestral.mistral.ai/v1/chat/completions
-            api_base = "https://codestral.mistral.ai/v1/fim/completions"
+            # codestral is openai compatible, we just need to set this to custom_openai and have the api_base be https://codestral.mistral.ai/v1
+            api_base = "https://codestral.mistral.ai/v1"
             dynamic_api_key = get_secret("CODESTRAL_API_KEY")
         elif custom_llm_provider == "deepseek":
             # deepseek is openai compatible, we just need to set this to custom_openai and have the api_base be https://api.deepseek.com/v1
@@ -3990,10 +3990,10 @@ def get_llm_provider(
         elif endpoint == "api.groq.com/openai/v1":
             custom_llm_provider = "groq"
             dynamic_api_key = get_secret("GROQ_API_KEY")
-        elif endpoint == "https://codestral.mistral.ai/v1/chat/completions":
+        elif endpoint == "https://codestral.mistral.ai/v1":
            custom_llm_provider = "codestral"
            dynamic_api_key = get_secret("CODESTRAL_API_KEY")
-        elif endpoint == "https://codestral.mistral.ai/v1/fim/completions":
+        elif endpoint == "https://codestral.mistral.ai/v1":
            custom_llm_provider = "text-completion-codestral"
            dynamic_api_key = get_secret("CODESTRAL_API_KEY")
        elif endpoint == "api.deepseek.com/v1":