mirror of
https://github.com/BerriAI/litellm.git
synced 2025-04-24 18:24:20 +00:00
Make OpenAI streaming mode work alongside earlier error detection
This commit is contained in:
parent
091b5577c0
commit
260875e61d
1 changed file with 3 additions and 2 deletions
|
@ -408,7 +408,8 @@ class OpenAIChatCompletion(BaseLLM):
|
|||
else:
|
||||
headers = {}
|
||||
response = raw_response.parse()
|
||||
if type(response) != BaseModel:
|
||||
|
||||
if isinstance(response, str):
|
||||
raise OpenAIError(
|
||||
status_code=422,
|
||||
message="Could not parse response",
|
||||
|
@ -448,7 +449,7 @@ class OpenAIChatCompletion(BaseLLM):
|
|||
headers = {}
|
||||
response = raw_response.parse()
|
||||
|
||||
if type(response) != BaseModel:
|
||||
if isinstance(response, str):
|
||||
raise OpenAIError(
|
||||
status_code=422,
|
||||
message="Could not parse response",
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue