diff --git a/litellm/llms/azure.py b/litellm/llms/azure.py
index 208c02678..b269afec7 100644
--- a/litellm/llms/azure.py
+++ b/litellm/llms/azure.py
@@ -262,7 +262,10 @@ class AzureChatCompletion(BaseLLM):
             exception_mapping_worked = True
             raise e
         except Exception as e:
-            raise AzureOpenAIError(status_code=500, message=str(e))
+            if hasattr(e, "status_code"):
+                raise e
+            else:
+                raise AzureOpenAIError(status_code=500, message=str(e))
 
     def streaming(self,
                   logging_obj,
diff --git a/litellm/tests/test_proxy_exception_mapping.py b/litellm/tests/test_proxy_exception_mapping.py
index 5dcb782c4..c5f99f28c 100644
--- a/litellm/tests/test_proxy_exception_mapping.py
+++ b/litellm/tests/test_proxy_exception_mapping.py
@@ -68,6 +68,7 @@ def test_chat_completion_exception_azure(client):
 
         # make an openai client to call _make_status_error_from_response
         openai_client = openai.OpenAI(api_key="anything")
         openai_exception = openai_client._make_status_error_from_response(response=response)
+        print(openai_exception)
         assert isinstance(openai_exception, openai.AuthenticationError)
     except Exception as e: