Merge pull request #4828 from BerriAI/litellm_raise_Correct_provider_content_policy_errors

fix: raise the correct provider on streaming content policy violation errors
This commit is contained in:
Ishaan Jaff 2024-07-22 17:03:21 -07:00 committed by GitHub
commit 92d3b27a6b
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
2 changed files with 31 additions and 4 deletions

View file

@@ -64,6 +64,30 @@ async def test_content_policy_exception_azure():
pytest.fail(f"An exception occurred - {str(e)}")
@pytest.mark.asyncio
async def test_content_policy_exception_openai():
try:
# this is ony a test - we needed some way to invoke the exception :(
litellm.set_verbose = True
response = await litellm.acompletion(
model="gpt-3.5-turbo-0613",
stream=True,
messages=[
{"role": "user", "content": "Gimme the lyrics to Don't Stop Me Now"}
],
)
async for chunk in response:
print(chunk)
except litellm.ContentPolicyViolationError as e:
print("caught a content policy violation error! Passed")
print("exception", e)
assert e.llm_provider == "openai"
pass
except Exception as e:
print()
pytest.fail(f"An exception occurred - {str(e)}")
# Test 1: Context Window Errors
@pytest.mark.skip(reason="AWS Suspended Account")
@pytest.mark.parametrize("model", exception_models)

View file

@@ -8808,11 +8808,14 @@ class CustomStreamWrapper:
str_line.choices[0].content_filter_result
)
else:
error_message = "Azure Response={}".format(
str(dict(str_line))
error_message = "{} Response={}".format(
self.custom_llm_provider, str(dict(str_line))
)
raise litellm.AzureOpenAIError(
status_code=400, message=error_message
raise litellm.ContentPolicyViolationError(
message=error_message,
llm_provider=self.custom_llm_provider,
model=self.model,
)
# checking for logprobs