diff --git a/litellm/proxy/_types.py b/litellm/proxy/_types.py
index d3f1bc844b..1092a27d63 100644
--- a/litellm/proxy/_types.py
+++ b/litellm/proxy/_types.py
@@ -1661,13 +1661,13 @@ class ProxyException(Exception):
         message: str,
         type: str,
         param: Optional[str],
-        code: Optional[int],
+        code: Optional[Union[int, str]] = None,
         headers: Optional[Dict[str, str]] = None,
     ):
         self.message = message
         self.type = type
         self.param = param
-        self.code = code
+        self.code = str(code) if code is not None else None
         if headers is not None:
             for k, v in headers.items():
                 if not isinstance(v, str):
@@ -1681,7 +1681,7 @@ class ProxyException(Exception):
             "No healthy deployment available" in self.message
             or "No deployments available" in self.message
         ):
-            self.code = 429
+            self.code = "429"
 
     def to_dict(self) -> dict:
         """Converts the ProxyException instance to a dictionary."""
diff --git a/litellm/tests/test_proxy_exception_mapping.py b/litellm/tests/test_proxy_exception_mapping.py
index 07ae7f5a87..d5db6f6ab0 100644
--- a/litellm/tests/test_proxy_exception_mapping.py
+++ b/litellm/tests/test_proxy_exception_mapping.py
@@ -79,6 +79,10 @@ def test_chat_completion_exception(client):
         in json_response["error"]["message"]
     )
 
+    code_in_error = json_response["error"]["code"]
+    # OpenAI SDK requires the error "code" to be a string, https://github.com/BerriAI/litellm/issues/4970
+    assert isinstance(code_in_error, str)
+
     # make an openai client to call _make_status_error_from_response
     openai_client = openai.OpenAI(api_key="anything")
    openai_exception = openai_client._make_status_error_from_response(