From 091b5577c01009fa246d78805d6322a32835c326 Mon Sep 17 00:00:00 2001
From: cynic64
Date: Wed, 29 Jan 2025 09:32:54 -0500
Subject: [PATCH 1/2] Catch OpenAI parsing errors earlier

---
 litellm/llms/openai/openai.py | 13 +++++++++++++
 1 file changed, 13 insertions(+)

diff --git a/litellm/llms/openai/openai.py b/litellm/llms/openai/openai.py
index aa361422fe..69036b08c0 100644
--- a/litellm/llms/openai/openai.py
+++ b/litellm/llms/openai/openai.py
@@ -408,6 +408,12 @@ class OpenAIChatCompletion(BaseLLM):
             else:
                 headers = {}
             response = raw_response.parse()
+            if type(response) != BaseModel:
+                raise OpenAIError(
+                    status_code=422,
+                    message="Could not parse response",
+                )
+
             return headers, response
         except openai.APITimeoutError as e:
             end_time = time.time()
@@ -441,6 +447,13 @@
             else:
                 headers = {}
             response = raw_response.parse()
+
+            if type(response) != BaseModel:
+                raise OpenAIError(
+                    status_code=422,
+                    message="Could not parse response",
+                )
+
             return headers, response
         except Exception as e:
             if raw_response is not None:

From 260875e61d72cc35380a4e6ad1cd4dffa8437453 Mon Sep 17 00:00:00 2001
From: cynic64
Date: Wed, 29 Jan 2025 10:38:29 -0500
Subject: [PATCH 2/2] Make OpenAI streaming mode work alongside earlier error detection

---
 litellm/llms/openai/openai.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/litellm/llms/openai/openai.py b/litellm/llms/openai/openai.py
index 69036b08c0..d3fa0ff697 100644
--- a/litellm/llms/openai/openai.py
+++ b/litellm/llms/openai/openai.py
@@ -408,7 +408,8 @@ class OpenAIChatCompletion(BaseLLM):
             else:
                 headers = {}
             response = raw_response.parse()
-            if type(response) != BaseModel:
+
+            if isinstance(response, str):
                 raise OpenAIError(
                     status_code=422,
                     message="Could not parse response",
@@ -448,7 +449,7 @@
             else:
                 headers = {}
             response = raw_response.parse()

-            if type(response) != BaseModel:
+            if isinstance(response, str):
                 raise OpenAIError(
                     status_code=422,
                     message="Could not parse response",
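
Note on the guard these two commits converge on: `raw_response.parse()` comes from the OpenAI SDK's `with_raw_response` wrapper, and the patches assume it can hand back a plain string rather than a parsed pydantic model when the body is not the expected JSON. The first commit's `type(response) != BaseModel` comparison would also reject every valid response, since parsed responses are instances of BaseModel subclasses and never of `BaseModel` exactly, and it apparently broke streaming, where `.parse()` returns a stream object rather than a single model; checking `isinstance(response, str)` only flags the raw-text case. Below is a minimal standalone sketch of that guard under those assumptions, with `UpstreamParseError` and `ensure_parsed` as hypothetical stand-ins for litellm's `OpenAIError` and the inline check, not code from the repository.

from typing import Any

from pydantic import BaseModel


class UpstreamParseError(Exception):
    # Hypothetical stand-in for litellm's OpenAIError; carries the same
    # fields the patch passes (status_code, message).
    def __init__(self, status_code: int, message: str) -> None:
        super().__init__(message)
        self.status_code = status_code
        self.message = message


def ensure_parsed(response: Any) -> Any:
    # Mirrors the guard added in the patches: a str means the body came back
    # as raw text instead of a parsed model, so fail fast with a 422 rather
    # than letting the bad value propagate. Anything else (BaseModel
    # subclasses, stream objects) passes through, which is what the second
    # commit's isinstance(response, str) check preserves for streaming mode.
    if isinstance(response, str):
        raise UpstreamParseError(status_code=422, message="Could not parse response")
    return response


if __name__ == "__main__":
    class FakeCompletion(BaseModel):
        # Toy model standing in for a parsed chat completion.
        id: str

    print(ensure_parsed(FakeCompletion(id="chatcmpl-123")))  # passes through
    try:
        ensure_parsed("<html>502 Bad Gateway</html>")  # raw text -> error
    except UpstreamParseError as err:
        print(err.status_code, err)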