diff --git a/litellm/router.py b/litellm/router.py
index c79dc2de09..d7535a83ae 100644
--- a/litellm/router.py
+++ b/litellm/router.py
@@ -2610,8 +2610,17 @@ class Router:
 
         if "azure" in model_name:
             if api_base is None or not isinstance(api_base, str):
+                filtered_litellm_params = {
+                    k: v
+                    for k, v in model["litellm_params"].items()
+                    if k != "api_key"
+                }
+                _filtered_model = {
+                    "model_name": model["model_name"],
+                    "litellm_params": filtered_litellm_params,
+                }
                 raise ValueError(
-                    f"api_base is required for Azure OpenAI. Set it on your config. Model - {model}"
+                    f"api_base is required for Azure OpenAI. Set it on your config. Model - {_filtered_model}"
                 )
             azure_ad_token = litellm_params.get("azure_ad_token")
             if azure_ad_token is not None:
diff --git a/litellm/tests/test_router.py b/litellm/tests/test_router.py
index ed35321132..d76dec25c7 100644
--- a/litellm/tests/test_router.py
+++ b/litellm/tests/test_router.py
@@ -19,6 +19,25 @@ import os, httpx
 load_dotenv()
 
 
+def test_router_sensitive_keys():
+    try:
+        router = Router(
+            model_list=[
+                {
+                    "model_name": "gpt-3.5-turbo",  # openai model name
+                    "litellm_params": {  # params for litellm completion/embedding call
+                        "model": "azure/chatgpt-v-2",
+                        "api_key": "special-key",
+                    },
+                    "model_info": {"id": 12345},
+                },
+            ],
+        )
+    except Exception as e:
+        print(f"error msg - {str(e)}")
+        assert "special-key" not in str(e)
+
+
 @pytest.mark.parametrize("num_retries", [None, 2])
 @pytest.mark.parametrize("max_retries", [None, 4])
 def test_router_num_retries_init(num_retries, max_retries):