diff --git a/litellm/proxy/proxy_config.yaml b/litellm/proxy/proxy_config.yaml
index bd8f5bfd0a..8dc03d6e00 100644
--- a/litellm/proxy/proxy_config.yaml
+++ b/litellm/proxy/proxy_config.yaml
@@ -8,6 +8,10 @@ model_list:
     litellm_params:
       model: fireworks_ai/accounts/fireworks/models/llama-v3-70b-instruct
       api_key: "os.environ/FIREWORKS"
+  - model_name: mistral-small-latest
+    litellm_params:
+      model: mistral/mistral-small-latest
+      api_key: "os.environ/MISTRAL_API_KEY"
   - model_name: tts
     litellm_params:
       model: openai/tts-1
diff --git a/litellm/proxy/tests/test_mistral_sdk.py b/litellm/proxy/tests/test_mistral_sdk.py
new file mode 100644
index 0000000000..0adc67b938
--- /dev/null
+++ b/litellm/proxy/tests/test_mistral_sdk.py
@@ -0,0 +1,13 @@
+import os
+
+from mistralai.client import MistralClient
+from mistralai.models.chat_completion import ChatMessage
+
+client = MistralClient(api_key="sk-1234", endpoint="http://0.0.0.0:4000")
+chat_response = client.chat(
+    model="mistral-small-latest",
+    messages=[
+        ChatMessage(role="user", content="this is a test request, write a short poem")
+    ],
+)
+print(chat_response.choices[0].message.content)