docs(azure_ai.md): add azure mistral function calling example to proxy docs

import TabItem from '@theme/TabItem';

# Azure AI Studio

## Sample Usage

**Ensure the following:**

1. The API Base passed ends in the `/v1/` prefix (an example is sketched after this list).
2. The `model` passed is listed in [supported models](#supported-models). You **DO NOT** need to pass your deployment name to litellm. Example: `model=azure/Mistral-large-nmefg`

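For requirement 1, the endpoint used in the original example is not shown here; a minimal sketch, assuming a hypothetical serverless Azure AI Studio deployment URL, might look like:

```python
# Hypothetical placeholder -- substitute your own Azure AI Studio serverless endpoint.
# The important part is that the base URL ends in the `/v1/` prefix.
api_base = "https://<your-deployment>.<region>.inference.ai.azure.com/v1/"
```
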
## Usage

<Tabs>
<TabItem value="sdk" label="SDK">

**Quick Start**

```python
import litellm

response = litellm.completion(
)
```

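The call's arguments are not shown above. A minimal sketch of a complete call, following the same pattern as the Function Calling example below (the environment variable names and model are illustrative assumptions, not the original snippet's exact values):

```python
import os
import litellm

response = litellm.completion(
    model="azure/mistral-large-latest",            # a supported model name, not your deployment name
    api_base=os.getenv("AZURE_MISTRAL_API_BASE"),  # base URL ending in /v1/
    api_key=os.getenv("AZURE_MISTRAL_API_KEY"),
    messages=[{"role": "user", "content": "Hey, how's it going?"}],
)
print(response)
```
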
</TabItem>
<TabItem value="proxy" label="PROXY">

## Sample Usage - LiteLLM Proxy

1. Add models to your config.yaml

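The config itself is not shown here; a minimal sketch, assuming the standard LiteLLM proxy `model_list` format and the same `AZURE_MISTRAL_*` environment variables used in the SDK example, might be:

```yaml
model_list:
  - model_name: mistral                            # the name callers pass as "model"
    litellm_params:
      model: azure/mistral-large-latest            # azure/<supported model name>
      api_base: os.environ/AZURE_MISTRAL_API_BASE  # endpoint ending in /v1/
      api_key: os.environ/AZURE_MISTRAL_API_KEY
```

You can then start the proxy with `litellm --config /path/to/config.yaml`.
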
</Tabs>

</TabItem>
</Tabs>

## Function Calling

<Tabs>
<TabItem value="sdk" label="SDK">

```python
import os

from litellm import completion

# set env
os.environ["AZURE_MISTRAL_API_KEY"] = "your-api-key"
os.environ["AZURE_MISTRAL_API_BASE"] = "your-api-base"

tools = [
    {
        "type": "function",
        "function": {
            "name": "get_current_weather",
            "description": "Get the current weather in a given location",
            "parameters": {
                "type": "object",
                "properties": {
                    "location": {
                        "type": "string",
                        "description": "The city and state, e.g. San Francisco, CA",
                    },
                    "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]},
                },
                "required": ["location"],
            },
        },
    }
]
messages = [{"role": "user", "content": "What's the weather like in Boston today?"}]

response = completion(
    model="azure/mistral-large-latest",
    api_base=os.getenv("AZURE_MISTRAL_API_BASE"),
    api_key=os.getenv("AZURE_MISTRAL_API_KEY"),
    messages=messages,
    tools=tools,
    tool_choice="auto",
)
# Add any assertions here to check the response arguments
print(response)
assert isinstance(response.choices[0].message.tool_calls[0].function.name, str)
assert isinstance(
    response.choices[0].message.tool_calls[0].function.arguments, str
)
```

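Since the response follows the OpenAI tool-calling format (which the assertions above rely on), a short follow-up sketch for reading the returned call might look like:

```python
import json

tool_call = response.choices[0].message.tool_calls[0]
print(tool_call.function.name)                   # e.g. "get_current_weather"
args = json.loads(tool_call.function.arguments)  # arguments arrive as a JSON string
print(args.get("location"))
```
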
</TabItem>
<TabItem value="proxy" label="PROXY">

```bash
curl http://0.0.0.0:4000/v1/chat/completions \
-H "Content-Type: application/json" \
-H "Authorization: Bearer $YOUR_API_KEY" \
-d '{
  "model": "mistral",
  "messages": [
    {
      "role": "user",
      "content": "What'\''s the weather like in Boston today?"
    }
  ],
  "tools": [
    {
      "type": "function",
      "function": {
        "name": "get_current_weather",
        "description": "Get the current weather in a given location",
        "parameters": {
          "type": "object",
          "properties": {
            "location": {
              "type": "string",
              "description": "The city and state, e.g. San Francisco, CA"
            },
            "unit": {
              "type": "string",
              "enum": ["celsius", "fahrenheit"]
            }
          },
          "required": ["location"]
        }
      }
    }
  ],
  "tool_choice": "auto"
}'
```

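Equivalently, you could point the official `openai` Python client at the proxy (assuming it is running locally on port 4000 with the `mistral` model name from your config); a minimal sketch:

```python
from openai import OpenAI

# Point the OpenAI client at your LiteLLM proxy (address and key below are placeholders).
client = OpenAI(api_key="sk-1234", base_url="http://0.0.0.0:4000")

tools = [
    {
        "type": "function",
        "function": {
            "name": "get_current_weather",
            "description": "Get the current weather in a given location",
            "parameters": {
                "type": "object",
                "properties": {
                    "location": {"type": "string", "description": "The city and state, e.g. San Francisco, CA"},
                    "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]},
                },
                "required": ["location"],
            },
        },
    }
]

response = client.chat.completions.create(
    model="mistral",  # the model_name from your config.yaml
    messages=[{"role": "user", "content": "What's the weather like in Boston today?"}],
    tools=tools,
    tool_choice="auto",
)
print(response.choices[0].message.tool_calls)
```
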
</TabItem>
</Tabs>

## Supported Models

| Model Name | Function Call |
|------------|---------------|