diff --git a/docs/my-website/docs/providers/azure_ai.md b/docs/my-website/docs/providers/azure_ai.md new file mode 100644 index 000000000..3504cd6b8 --- /dev/null +++ b/docs/my-website/docs/providers/azure_ai.md @@ -0,0 +1,37 @@ +# Azure AI Studio + +## Using Mistral models deployed on Azure AI Studio + +**Ensure you have the `/v1` in your api_base** + +### Sample Usage +```python +from litellm import completion +import os + +response = completion( + model="mistral/Mistral-large-dfgfj", + api_base="https://Mistral-large-dfgfj-serverless.eastus2.inference.ai.azure.com/v1", + api_key = "JGbKodRcTp****", + messages=[ + {"role": "user", "content": "hello from litellm"} + ], +) +print(response) +``` + +### [LiteLLM Proxy] Using Mistral Models + +Set this on your litellm proxy config.yaml + +**Ensure you have the `/v1` in your api_base** +```yaml +model_list: + - model_name: mistral + litellm_params: + model: mistral/Mistral-large-dfgfj + api_base: https://Mistral-large-dfgfj-serverless.eastus2.inference.ai.azure.com/v1 + api_key: JGbKodRcTp**** +``` + + diff --git a/docs/my-website/sidebars.js b/docs/my-website/sidebars.js index 86112b6d2..7a24723af 100644 --- a/docs/my-website/sidebars.js +++ b/docs/my-website/sidebars.js @@ -120,6 +120,7 @@ const sidebars = { "providers/openai", "providers/openai_compatible", "providers/azure", + "providers/azure_ai", "providers/huggingface", "providers/ollama", "providers/vertex",