forked from phoenix/litellm-mirror
working chat, text for codestral
This commit is contained in: parent 3ae05c0404 · commit 5f76f96e4d
3 changed files with 48 additions and 6 deletions
@@ -817,6 +817,30 @@ def test_completion_mistral_api():
         pytest.fail(f"Error occurred: {e}")
 
 
+@pytest.mark.asyncio
+async def test_completion_codestral_chat_api():
+    try:
+        litellm.set_verbose = True
+        response = await litellm.acompletion(
+            model="codestral/codestral-latest",
+            max_tokens=5,
+            messages=[
+                {
+                    "role": "user",
+                    "content": "Hey, how's it going?",
+                }
+            ],
+        )
+        # Add any assertions here to check the response
+        print(response)
+
+        # cost = litellm.completion_cost(completion_response=response)
+        # print("cost to make mistral completion=", cost)
+        # assert cost > 0.0
+    except Exception as e:
+        pytest.fail(f"Error occurred: {e}")
+
+
 def test_completion_mistral_api_mistral_large_function_call():
     litellm.set_verbose = True
     tools = [
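For reference, the chat route added above can be exercised outside of pytest. A minimal sketch, assuming this patch is installed and CODESTRAL_API_KEY is exported (the key name comes from the get_llm_provider change further down); the response-field access assumes litellm's usual OpenAI-compatible response shape:

import asyncio
import os

import litellm

# Assumes a real key is exported; "<your-key>" is a placeholder, not a real key.
os.environ.setdefault("CODESTRAL_API_KEY", "<your-key>")

async def main():
    # The "codestral/" prefix routes the call to the Mistral codestral endpoint.
    response = await litellm.acompletion(
        model="codestral/codestral-latest",
        max_tokens=5,
        messages=[{"role": "user", "content": "Hey, how's it going?"}],
    )
    print(response.choices[0].message.content)

asyncio.run(main())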

@@ -4076,3 +4076,21 @@ async def test_async_text_completion_chat_model_stream():
 
 
 # asyncio.run(test_async_text_completion_chat_model_stream())
+
+
+@pytest.mark.asyncio
+async def test_completion_codestral_fim_api():
+    try:
+        litellm.set_verbose = True
+        response = await litellm.atext_completion(
+            model="text-completion-codestral/codestral-2405",
+            prompt="def is_odd(n): \n    return n % 2 == 1 \ndef test_is_odd():",
+        )
+        # Add any assertions here to check the response
+        print(response)
+
+        # cost = litellm.completion_cost(completion_response=response)
+        # print("cost to make mistral completion=", cost)
+        # assert cost > 0.0
+    except Exception as e:
+        pytest.fail(f"Error occurred: {e}")
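The FIM (fill-in-the-middle) path works the same way through atext_completion. A minimal sketch, assuming CODESTRAL_API_KEY is exported and that litellm's text-completion-codestral handler appends the FIM-specific path internally (the /v1/fim/completions suffix is dropped from api_base in the hunk below):

import asyncio

import litellm

async def main():
    # "text-completion-codestral/" selects the FIM route added in this commit.
    response = await litellm.atext_completion(
        model="text-completion-codestral/codestral-2405",
        prompt="def is_odd(n): \n    return n % 2 == 1 \ndef test_is_odd():",
    )
    # text_completion responses follow the OpenAI completions shape.
    print(response.choices[0].text)

asyncio.run(main())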
@@ -3891,12 +3891,12 @@ def get_llm_provider(
             api_base = "https://api.groq.com/openai/v1"
             dynamic_api_key = get_secret("GROQ_API_KEY")
         elif custom_llm_provider == "codestral":
-            # codestral is openai compatible, we just need to set this to custom_openai and have the api_base be https://codestral.mistral.ai/v1/chat/completions
-            api_base = "https://codestral.mistral.ai/v1/chat/completions"
+            # codestral is openai compatible, we just need to set this to custom_openai and have the api_base be https://codestral.mistral.ai/v1
+            api_base = "https://codestral.mistral.ai/v1"
             dynamic_api_key = get_secret("CODESTRAL_API_KEY")
         elif custom_llm_provider == "text-completion-codestral":
-            # codestral is openai compatible, we just need to set this to custom_openai and have the api_base be https://codestral.mistral.ai/v1/chat/completions
-            api_base = "https://codestral.mistral.ai/v1/fim/completions"
+            # codestral is openai compatible, we just need to set this to custom_openai and have the api_base be https://codestral.mistral.ai/v1
+            api_base = "https://codestral.mistral.ai/v1"
             dynamic_api_key = get_secret("CODESTRAL_API_KEY")
         elif custom_llm_provider == "deepseek":
             # deepseek is openai compatible, we just need to set this to custom_openai and have the api_base be https://api.deepseek.com/v1
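With both api_base values above now pointing at the shared https://codestral.mistral.ai/v1 root, prefix-based resolution can be sanity-checked directly. A minimal sketch, assuming get_llm_provider's usual (model, provider, dynamic_api_key, api_base) return tuple:

from litellm import get_llm_provider

model, provider, dynamic_api_key, api_base = get_llm_provider(
    model="codestral/codestral-latest"
)
assert provider == "codestral"
assert api_base == "https://codestral.mistral.ai/v1"
assert model == "codestral-latest"  # the provider prefix is stripped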
@@ -3990,10 +3990,10 @@ def get_llm_provider(
         elif endpoint == "api.groq.com/openai/v1":
             custom_llm_provider = "groq"
             dynamic_api_key = get_secret("GROQ_API_KEY")
-        elif endpoint == "https://codestral.mistral.ai/v1/chat/completions":
+        elif endpoint == "https://codestral.mistral.ai/v1":
             custom_llm_provider = "codestral"
             dynamic_api_key = get_secret("CODESTRAL_API_KEY")
-        elif endpoint == "https://codestral.mistral.ai/v1/fim/completions":
+        elif endpoint == "https://codestral.mistral.ai/v1":
             custom_llm_provider = "text-completion-codestral"
             dynamic_api_key = get_secret("CODESTRAL_API_KEY")
         elif endpoint == "api.deepseek.com/v1":
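One consequence of this hunk worth noting: both endpoint branches now compare against the identical URL, so endpoint-based detection can only take the first match, and a bare codestral base URL resolves to "codestral" (the chat provider) rather than "text-completion-codestral". A hedged sketch of that behavior, assuming the same get_llm_provider signature as above:

from litellm import get_llm_provider

# Both branches test the same URL, so the chat provider wins the tie.
model, provider, dynamic_api_key, api_base = get_llm_provider(
    model="codestral-latest",
    api_base="https://codestral.mistral.ai/v1",
)
assert provider == "codestral"  # "text-completion-codestral" is unreachable here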