forked from phoenix/litellm-mirror
test - openai content policy errors
This commit is contained in:
parent
8f9638f2c1
commit
69e52e0a47
1 changed files with 24 additions and 0 deletions
|
@ -64,6 +64,30 @@ async def test_content_policy_exception_azure():
|
|||
pytest.fail(f"An exception occurred - {str(e)}")
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_content_policy_exception_openai():
    """Verify litellm surfaces OpenAI content-policy refusals as ContentPolicyViolationError.

    Streams a request for song lyrics (an input chosen to trip OpenAI's
    content filter) and asserts that the raised exception is
    ``litellm.ContentPolicyViolationError`` with ``llm_provider == "openai"``.
    Any other exception fails the test.
    """
    try:
        # this is only a test - we needed some way to invoke the exception :(
        litellm.set_verbose = True
        response = await litellm.acompletion(
            model="gpt-3.5-turbo-0613",
            stream=True,
            messages=[
                {"role": "user", "content": "Gimme the lyrics to Don't Stop Me Now"}
            ],
        )
        # Drain the stream; the violation is raised mid-stream, not at call time.
        async for chunk in response:
            print(chunk)
    except litellm.ContentPolicyViolationError as e:
        print("caught a content policy violation error! Passed")
        print("exception", e)
        assert e.llm_provider == "openai"
    except Exception as e:
        # Was a bare print() that discarded the failure detail; log the
        # exception before failing, matching the expected-branch output.
        print("exception", e)
        pytest.fail(f"An exception occurred - {str(e)}")
|
||||
|
||||
|
||||
# Test 1: Context Window Errors
|
||||
@pytest.mark.skip(reason="AWS Suspended Account")
|
||||
@pytest.mark.parametrize("model", exception_models)
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue