diff --git a/docs/my-website/docs/providers/azure_ai.md b/docs/my-website/docs/providers/azure_ai.md
index e6ac731d7c..2ddb5137bd 100644
--- a/docs/my-website/docs/providers/azure_ai.md
+++ b/docs/my-website/docs/providers/azure_ai.md
@@ -1,15 +1,12 @@
 # Azure AI Studio
 
 ## Sample Usage
-Call Azure Command R Plus in the OpenAI Format
-- model=`azure/command-r-plus`. The `azure/` prefix sends this to Azure. The `command-r-plus` indicates the base model being called
-- `model_id` = This is your deployment name on Azure AI studio
+The `azure/` prefix sends this to Azure
 
 ```python
 import litellm
 response = litellm.completion(
-    model="azure/command-r-plus",
-    model_id="Cohere-command-r-plus-gylpd",
+    model="azure/",
     api_base="https://Cohere-command-r-plus-gylpd-serverless.eastus2.inference.ai.azure.com/v1/"
     api_key="eskk******"
     messages=[{"role": "user", "content": "What is the meaning of life?"}],
diff --git a/litellm/tests/test_completion.py b/litellm/tests/test_completion.py
index 26dd6cc6e9..aa54f81549 100644
--- a/litellm/tests/test_completion.py
+++ b/litellm/tests/test_completion.py
@@ -59,7 +59,6 @@ def test_completion_azure_command_r():
 
         response = completion(
             model="azure/command-r-plus",
-            model_id="Cohere-command-r-plus-gylpd",
             api_base=os.getenv("AZURE_COHERE_API_BASE"),
             api_key=os.getenv("AZURE_COHERE_API_KEY"),
             messages=[{"role": "user", "content": "What is the meaning of life?"}],
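
For context, a minimal runnable sketch of the call pattern this change leaves in place, modeled on the updated test above. The serverless endpoint URL is the illustrative one from the docs snippet, `AZURE_COHERE_API_KEY` is the environment variable the test reads, and the commas between keyword arguments (absent in the docs snippet) are assumed; substitute your own Azure AI Studio deployment values.

```python
import os

import litellm

# Sketch of the azure/ call path exercised by test_completion_azure_command_r.
# The endpoint URL below is the illustrative serverless endpoint from the docs
# diff; replace it and the key with your own Azure AI Studio deployment values.
response = litellm.completion(
    model="azure/command-r-plus",  # the "azure/" prefix routes the request to Azure AI
    api_base="https://Cohere-command-r-plus-gylpd-serverless.eastus2.inference.ai.azure.com/v1/",
    api_key=os.getenv("AZURE_COHERE_API_KEY"),
    messages=[{"role": "user", "content": "What is the meaning of life?"}],
)
print(response.choices[0].message.content)
```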