fix(utils.py): don't raise error on openai content filter during streaming - return as is

Fixes an issue where we would raise an error, whereas OpenAI instead returns the chunk with its finish_reason set to 'content_filter'
This commit is contained in:
Krrish Dholakia 2024-07-25 19:50:07 -07:00
parent 5bec2bf513
commit a2fd8459fc
2 changed files with 50 additions and 15 deletions

View file

@ -8840,21 +8840,6 @@ class CustomStreamWrapper:
if str_line.choices[0].finish_reason:
is_finished = True
finish_reason = str_line.choices[0].finish_reason
if finish_reason == "content_filter":
if hasattr(str_line.choices[0], "content_filter_result"):
error_message = json.dumps(
str_line.choices[0].content_filter_result
)
else:
error_message = "{} Response={}".format(
self.custom_llm_provider, str(dict(str_line))
)
raise litellm.ContentPolicyViolationError(
message=error_message,
llm_provider=self.custom_llm_provider,
model=self.model,
)
# checking for logprobs
if (