diff --git a/litellm/proxy/_types.py b/litellm/proxy/_types.py index 1092a27d63..8d9eb636d3 100644 --- a/litellm/proxy/_types.py +++ b/litellm/proxy/_types.py @@ -1667,6 +1667,10 @@ class ProxyException(Exception): self.message = message self.type = type self.param = param + + # In the official Python OpenAI library, the error code is a string: + # https://github.com/openai/openai-python/blob/195c05a64d39c87b2dfdf1eca2d339597f1fce03/src/openai/types/shared/error_object.py#L11 + # Related LiteLLM issue: https://github.com/BerriAI/litellm/discussions/4834 self.code = str(code) if headers is not None: for k, v in headers.items(): diff --git a/litellm/tests/test_proxy_exception_mapping.py b/litellm/tests/test_proxy_exception_mapping.py index d5db6f6ab0..a774d1b0ef 100644 --- a/litellm/tests/test_proxy_exception_mapping.py +++ b/litellm/tests/test_proxy_exception_mapping.py @@ -81,6 +81,9 @@ def test_chat_completion_exception(client): code_in_error = json_response["error"]["code"] # OpenAI SDK required code to be STR, https://github.com/BerriAI/litellm/issues/4970 + # In the official Python OpenAI library, the error code is a string: + # https://github.com/openai/openai-python/blob/195c05a64d39c87b2dfdf1eca2d339597f1fce03/src/openai/types/shared/error_object.py#L11 + # Related LiteLLM issue: https://github.com/BerriAI/litellm/discussions/4834 assert type(code_in_error) == str # make an openai client to call _make_status_error_from_response