diff --git a/docs/my-website/docs/providers/azure_ai.md b/docs/my-website/docs/providers/azure_ai.md
index b8dbe16ba..ed13c5664 100644
--- a/docs/my-website/docs/providers/azure_ai.md
+++ b/docs/my-website/docs/providers/azure_ai.md
@@ -3,8 +3,6 @@ import TabItem from '@theme/TabItem';
# Azure AI Studio
-## Sample Usage
-
**Ensure the following:**
1. The API Base passed ends in the `/v1/` prefix
example:
@@ -14,8 +12,11 @@ import TabItem from '@theme/TabItem';
2. The `model` passed is listed in [supported models](#supported-models). You **DO NOT** need to pass your deployment name to litellm. Example: `model=azure/Mistral-large-nmefg`
+## Usage
+
-**Quick Start**
```python
import litellm
response = litellm.completion(
@@ -26,6 +27,9 @@ response = litellm.completion(
)
```
+
## Sample Usage - LiteLLM Proxy
1. Add models to your config.yaml
@@ -99,6 +103,107 @@ response = litellm.completion(
+
+## Function Calling
+
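+Azure AI models that support tool use accept the OpenAI-style `tools` and `tool_choice` parameters. The example below calls a Mistral Large deployment; point `api_base` and `api_key` at your own deployment.
+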
+```python
+import os
+
+from litellm import completion
+
+# set env
+os.environ["AZURE_MISTRAL_API_KEY"] = "your-api-key"
+os.environ["AZURE_MISTRAL_API_BASE"] = "your-api-base"
+
+tools = [
+ {
+ "type": "function",
+ "function": {
+ "name": "get_current_weather",
+ "description": "Get the current weather in a given location",
+ "parameters": {
+ "type": "object",
+ "properties": {
+ "location": {
+ "type": "string",
+ "description": "The city and state, e.g. San Francisco, CA",
+ },
+ "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]},
+ },
+ "required": ["location"],
+ },
+ },
+ }
+]
+messages = [{"role": "user", "content": "What's the weather like in Boston today?"}]
+
+response = completion(
+ model="azure/mistral-large-latest",
+    api_base=os.getenv("AZURE_MISTRAL_API_BASE"),
+    api_key=os.getenv("AZURE_MISTRAL_API_KEY"),
+ messages=messages,
+ tools=tools,
+ tool_choice="auto",
+)
+# Add any assertions here to check response args
+print(response)
+assert isinstance(response.choices[0].message.tool_calls[0].function.name, str)
+assert isinstance(
+ response.choices[0].message.tool_calls[0].function.arguments, str
+)
+
+```
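+
+If the model decides to call the tool, `tool_calls` carries the function name and JSON-encoded arguments. A minimal sketch of completing the loop (the `get_current_weather` implementation below is a hypothetical stub, and support for follow-up `tool` messages depends on the deployed model):
+
+```python
+import json
+
+def get_current_weather(location, unit="fahrenheit"):
+    # hypothetical stub: replace with a real weather lookup
+    return json.dumps({"location": location, "temperature": "72", "unit": unit})
+
+tool_call = response.choices[0].message.tool_calls[0]
+args = json.loads(tool_call.function.arguments)
+
+# send the assistant's tool call and the tool's output back to the model
+messages.append(response.choices[0].message)
+messages.append(
+    {
+        "role": "tool",
+        "tool_call_id": tool_call.id,
+        "name": tool_call.function.name,
+        "content": get_current_weather(**args),
+    }
+)
+
+second_response = completion(
+    model="azure/mistral-large-latest",
+    api_base=os.getenv("AZURE_MISTRAL_API_BASE"),
+    api_key=os.getenv("AZURE_MISTRAL_API_KEY"),
+    messages=messages,
+    tools=tools,
+)
+print(second_response.choices[0].message.content)
+```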
+
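+The same request works through the LiteLLM Proxy. The call below assumes the proxy is running on its default `http://0.0.0.0:4000` and that your `config.yaml` maps a `model_name` of `mistral` to your Azure AI deployment, as in the proxy setup above.
+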
+```bash
+curl http://0.0.0.0:4000/v1/chat/completions \
+-H "Content-Type: application/json" \
+-H "Authorization: Bearer $YOUR_API_KEY" \
+-d '{
+ "model": "mistral",
+ "messages": [
+ {
+ "role": "user",
+ "content": "What'\''s the weather like in Boston today?"
+ }
+ ],
+ "tools": [
+ {
+ "type": "function",
+ "function": {
+ "name": "get_current_weather",
+ "description": "Get the current weather in a given location",
+ "parameters": {
+ "type": "object",
+ "properties": {
+ "location": {
+ "type": "string",
+ "description": "The city and state, e.g. San Francisco, CA"
+ },
+ "unit": {
+ "type": "string",
+ "enum": ["celsius", "fahrenheit"]
+ }
+ },
+ "required": ["location"]
+ }
+ }
+ }
+ ],
+ "tool_choice": "auto"
+}'
+
+```
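+
+When the model chooses to call the tool, the OpenAI-compatible response from the proxy carries a `tool_calls` array (function name plus JSON-encoded arguments) on the assistant message instead of plain text `content`.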
+
## Supported Models
| Model Name | Function Call |