forked from phoenix/litellm-mirror
Merge pull request #4828 from BerriAI/litellm_raise_Correct_provider_content_policy_errors
fix: raise the correct provider on streaming content policy violation errors
Commit 92d3b27a6b
2 changed files with 31 additions and 4 deletions
```diff
@@ -64,6 +64,30 @@ async def test_content_policy_exception_azure():
         pytest.fail(f"An exception occurred - {str(e)}")
 
 
+@pytest.mark.asyncio
+async def test_content_policy_exception_openai():
+    try:
+        # this is ony a test - we needed some way to invoke the exception :(
+        litellm.set_verbose = True
+        response = await litellm.acompletion(
+            model="gpt-3.5-turbo-0613",
+            stream=True,
+            messages=[
+                {"role": "user", "content": "Gimme the lyrics to Don't Stop Me Now"}
+            ],
+        )
+        async for chunk in response:
+            print(chunk)
+    except litellm.ContentPolicyViolationError as e:
+        print("caught a content policy violation error! Passed")
+        print("exception", e)
+        assert e.llm_provider == "openai"
+        pass
+    except Exception as e:
+        print()
+        pytest.fail(f"An exception occurred - {str(e)}")
+
+
 # Test 1: Context Window Errors
 @pytest.mark.skip(reason="AWS Suspended Account")
 @pytest.mark.parametrize("model", exception_models)
```
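The new test makes a live streaming call to OpenAI. To run just this test locally, something along these lines should work (a sketch, not part of this PR; it assumes the test file is on pytest's collection path and that `OPENAI_API_KEY` is set in the environment):

```python
# Hypothetical local runner for the new test; not part of this PR.
# -k selects the test by name, -s lets the test's print() output through.
import pytest

raise SystemExit(pytest.main(["-s", "-k", "test_content_policy_exception_openai"]))
```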
```diff
@@ -8808,11 +8808,14 @@ class CustomStreamWrapper:
                                 str_line.choices[0].content_filter_result
                             )
                         else:
-                            error_message = "Azure Response={}".format(
-                                str(dict(str_line))
+                            error_message = "{} Response={}".format(
+                                self.custom_llm_provider, str(dict(str_line))
                             )
-                        raise litellm.AzureOpenAIError(
-                            status_code=400, message=error_message
+
+                        raise litellm.ContentPolicyViolationError(
+                            message=error_message,
+                            llm_provider=self.custom_llm_provider,
+                            model=self.model,
                         )
 
                 # checking for logprobs
```
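Taken together, a streaming content-filter rejection now surfaces as `litellm.ContentPolicyViolationError` tagged with the provider that actually rejected the request, instead of always being wrapped as an Azure error. A minimal caller-side sketch of what that enables (not code from this PR; the model name and prompt are placeholders, and a real run needs provider credentials):

```python
import asyncio

import litellm


async def main():
    try:
        response = await litellm.acompletion(
            model="gpt-3.5-turbo-0613",  # placeholder model
            stream=True,
            messages=[
                {"role": "user", "content": "a prompt that trips the provider's content filter"}
            ],
        )
        async for chunk in response:
            print(chunk)
    except litellm.ContentPolicyViolationError as e:
        # e.llm_provider now names the provider that raised the violation
        # (the test above asserts it is "openai" for an OpenAI call).
        print(f"content policy violation from provider={e.llm_provider}: {e}")


if __name__ == "__main__":
    asyncio.run(main())
```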