(Feat) - LiteLLM Use UsernamePasswordCredential for Azure OpenAI (#7496)

* add get_azure_ad_token_from_username_password

* docs azure use username / password for auth

* update doc

* get_azure_ad_token_from_username_password

* test test_get_azure_ad_token_from_username_password
This commit is contained in:
Ishaan Jaff 2025-01-01 14:11:27 -08:00 committed by GitHub
parent 2979b8301c
commit 38bfefa6ef
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
6 changed files with 131 additions and 8 deletions

View file

@ -528,6 +528,39 @@ Example video of using `tenant_id`, `client_id`, `client_secret` with LiteLLM Pr
<iframe width="840" height="500" src="https://www.loom.com/embed/70d3f219ee7f4e5d84778b7f17bba506?sid=04b8ff29-485f-4cb8-929e-6b392722f36d" frameborder="0" webkitallowfullscreen mozallowfullscreen allowfullscreen></iframe>
### Entra ID - use client_id, username, password
Here is an example of setting up `client_id`, `azure_username`, `azure_password` in your litellm proxy `config.yaml`
```yaml
model_list:
- model_name: gpt-3.5-turbo
litellm_params:
model: azure/chatgpt-v-2
api_base: https://openai-gpt-4-test-v-1.openai.azure.com/
api_version: "2023-05-15"
client_id: os.environ/AZURE_CLIENT_ID
azure_username: os.environ/AZURE_USERNAME
azure_password: os.environ/AZURE_PASSWORD
```
Test it
```shell
curl --location 'http://0.0.0.0:4000/chat/completions' \
--header 'Content-Type: application/json' \
--data ' {
"model": "gpt-3.5-turbo",
"messages": [
{
"role": "user",
"content": "what llm are you"
}
]
}
'
```
### Azure AD Token Refresh - `DefaultAzureCredential`
Use this if you want to authenticate your requests with Azure `DefaultAzureCredential`

View file

@ -137,3 +137,44 @@ def get_azure_ad_token_from_entrata_id(
verbose_logger.debug("token_provider %s", token_provider)
return token_provider
def get_azure_ad_token_from_username_password(
    client_id: str,
    azure_username: str,
    azure_password: str,
    scope: str = "https://cognitiveservices.azure.com/.default",
) -> Callable[[], str]:
    """
    Build an Azure AD bearer-token provider from `client_id`, `azure_username`,
    and `azure_password` (Entra ID username/password credential flow).

    Args:
        client_id: Azure application (client) ID the credential authenticates as.
        azure_username: User principal name used to sign in.
        azure_password: The user's password. Never written to logs.
        scope: OAuth2 scope requested for the token; defaults to the
            Azure Cognitive Services scope.

    Returns:
        A zero-argument callable that returns a bearer token string.
    """
    from azure.identity import UsernamePasswordCredential, get_bearer_token_provider

    # SECURITY: log only non-secret identifiers — never the password.
    verbose_logger.debug(
        "client_id %s, azure_username %s",
        client_id,
        azure_username,
    )
    credential = UsernamePasswordCredential(
        client_id=client_id,
        username=azure_username,
        password=azure_password,
    )
    verbose_logger.debug("credential %s", credential)

    token_provider = get_bearer_token_provider(credential, scope)

    verbose_logger.debug("token_provider %s", token_provider)
    return token_provider

View file

@ -1,12 +1,11 @@
model_list:
- model_name: "*"
- model_name: "azure/*"
litellm_params:
model: "openai/*"
api_key: os.environ/OPENAI_API_KEY
- model_name: "openai/*"
litellm_params:
model: "openai/*"
api_key: os.environ/OPENAI_API_KEY
model: azure/chatgpt-v-2
api_base: https://openai-gpt-4-test-v-1.openai.azure.com/
client_id: os.environ/AZURE_CLIENT_ID
azure_username: os.environ/AZURE_USERNAME
azure_password: os.environ/AZURE_PASSWORD
litellm_settings:
callbacks: ["datadog"]

View file

@ -9,7 +9,10 @@ import litellm
from litellm import get_secret, get_secret_str
from litellm._logging import verbose_router_logger
from litellm.llms.azure.azure import get_azure_ad_token_from_oidc
from litellm.llms.azure.common_utils import get_azure_ad_token_from_entrata_id
from litellm.llms.azure.common_utils import (
get_azure_ad_token_from_entrata_id,
get_azure_ad_token_from_username_password,
)
from litellm.secret_managers.get_azure_ad_token_provider import (
get_azure_ad_token_provider,
)
@ -201,6 +204,14 @@ class InitalizeOpenAISDKClient:
client_id=litellm_params.get("client_id"),
client_secret=litellm_params.get("client_secret"),
)
if litellm_params.get("azure_username") and litellm_params.get(
"azure_password"
):
azure_ad_token_provider = get_azure_ad_token_from_username_password(
azure_username=litellm_params.get("azure_username"),
azure_password=litellm_params.get("azure_password"),
client_id=litellm_params.get("client_id"),
)
if custom_llm_provider == "azure" or custom_llm_provider == "azure_text":
if api_base is None or not isinstance(api_base, str):

View file

@ -1678,6 +1678,8 @@ all_litellm_params = [
"azure_ad_token_provider",
"tenant_id",
"client_id",
"azure_username",
"azure_password",
"client_secret",
"user_continue_message",
"configurable_clientside_auth_params",

View file

@ -211,3 +211,40 @@ class TestAzureEmbedding(BaseLLMEmbeddingTest):
def get_custom_llm_provider(self) -> litellm.LlmProviders:
return litellm.LlmProviders.AZURE
@patch("azure.identity.UsernamePasswordCredential")
@patch("azure.identity.get_bearer_token_provider")
def test_get_azure_ad_token_from_username_password(
    mock_get_bearer_token_provider, mock_credential
):
    """Verify the username/password helper wires its inputs into
    azure.identity and hands back the resulting bearer-token provider."""
    from litellm.llms.azure.common_utils import (
        get_azure_ad_token_from_username_password,
    )

    # Fixture values fed to the helper under test.
    test_client_id = "test-client-id"
    test_username = "test-username"
    test_password = "test-password"

    # Stub out the provider azure.identity would return.
    fake_provider = lambda: "mock-token"
    mock_get_bearer_token_provider.return_value = fake_provider

    token_provider = get_azure_ad_token_from_username_password(
        client_id=test_client_id,
        azure_username=test_username,
        azure_password=test_password,
    )

    # The credential must be constructed from exactly the supplied values...
    mock_credential.assert_called_once_with(
        client_id=test_client_id,
        username=test_username,
        password=test_password,
    )
    # ...and exchanged for a provider scoped to Cognitive Services.
    mock_get_bearer_token_provider.assert_called_once_with(
        mock_credential.return_value, "https://cognitiveservices.azure.com/.default"
    )
    # The helper returns whatever provider azure.identity produced.
    assert token_provider == fake_provider