diff --git a/docs/my-website/docs/providers/mistral.md b/docs/my-website/docs/providers/mistral.md
index b6558435b..8e5e2bf66 100644
--- a/docs/my-website/docs/providers/mistral.md
+++ b/docs/my-website/docs/providers/mistral.md
@@ -53,6 +53,51 @@ All models listed here https://docs.mistral.ai/platform/endpoints are supported.
 | open-mixtral-8x22b | `completion(model="mistral/open-mixtral-8x22b", messages)` |
 
+## Function Calling
+
+```python
+import os
+from litellm import completion
+
+# set env
+os.environ["MISTRAL_API_KEY"] = "your-api-key"
+
+tools = [
+    {
+        "type": "function",
+        "function": {
+            "name": "get_current_weather",
+            "description": "Get the current weather in a given location",
+            "parameters": {
+                "type": "object",
+                "properties": {
+                    "location": {
+                        "type": "string",
+                        "description": "The city and state, e.g. San Francisco, CA",
+                    },
+                    "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]},
+                },
+                "required": ["location"],
+            },
+        },
+    }
+]
+messages = [{"role": "user", "content": "What's the weather like in Boston today?"}]
+
+response = completion(
+    model="mistral/mistral-large-latest",
+    messages=messages,
+    tools=tools,
+    tool_choice="auto",
+)
+# Add any assertions here to check the response
+print(response)
+assert isinstance(response.choices[0].message.tool_calls[0].function.name, str)
+assert isinstance(
+    response.choices[0].message.tool_calls[0].function.arguments, str
+)
+```
+
 ## Sample Usage - Embedding
 ```python
 from litellm import embedding