forked from phoenix/litellm-mirror
test_llm_guard_triggered
This commit is contained in:
parent
9f927e0e4e
commit
6fa421ed8e
1 changed files with 28 additions and 5 deletions
|
@ -6,7 +6,7 @@ from typing import Optional, List, Union
|
||||||
import uuid
|
import uuid
|
||||||
|
|
||||||
|
|
||||||
async def chat_completion(session, key, model: Union[str, List] = "gpt-4"):
|
async def chat_completion(session, key, messages, model: Union[str, List] = "gpt-4"):
|
||||||
url = "http://0.0.0.0:4000/chat/completions"
|
url = "http://0.0.0.0:4000/chat/completions"
|
||||||
headers = {
|
headers = {
|
||||||
"Authorization": f"Bearer {key}",
|
"Authorization": f"Bearer {key}",
|
||||||
|
@ -14,9 +14,7 @@ async def chat_completion(session, key, model: Union[str, List] = "gpt-4"):
|
||||||
}
|
}
|
||||||
data = {
|
data = {
|
||||||
"model": model,
|
"model": model,
|
||||||
"messages": [
|
"messages": messages,
|
||||||
{"role": "user", "content": f"Hello! {str(uuid.uuid4())}"},
|
|
||||||
],
|
|
||||||
"guardrails": ["aporia-post-guard", "aporia-pre-guard"],
|
"guardrails": ["aporia-post-guard", "aporia-pre-guard"],
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -45,7 +43,10 @@ async def test_no_llm_guard_triggered():
|
||||||
"""
|
"""
|
||||||
async with aiohttp.ClientSession() as session:
|
async with aiohttp.ClientSession() as session:
|
||||||
response, headers = await chat_completion(
|
response, headers = await chat_completion(
|
||||||
session, "sk-1234", model="fake-openai-endpoint"
|
session,
|
||||||
|
"sk-1234",
|
||||||
|
model="fake-openai-endpoint",
|
||||||
|
messages=[{"role": "user", "content": f"Hello what's the weather"}],
|
||||||
)
|
)
|
||||||
await asyncio.sleep(3)
|
await asyncio.sleep(3)
|
||||||
|
|
||||||
|
@ -57,3 +58,25 @@ async def test_no_llm_guard_triggered():
|
||||||
headers["x-litellm-applied-guardrails"]
|
headers["x-litellm-applied-guardrails"]
|
||||||
== "aporia-pre-guard,aporia-post-guard"
|
== "aporia-pre-guard,aporia-post-guard"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
async def test_llm_guard_triggered():
    """
    - Tests a request where content moderation (PII detection) IS triggered
    - Assert the Aporia guardrail blocks the request and its error message
      ("Aporia detected and blocked PII") is surfaced to the caller

    Docstring fixed: it was copy-pasted from test_no_llm_guard_triggered and
    wrongly claimed no content mod is triggered.
    """
    async with aiohttp.ClientSession() as session:
        try:
            # An email address in the prompt should trip the PII guardrail,
            # so a successful completion here is a test failure.
            response, headers = await chat_completion(
                session,
                "sk-1234",
                model="fake-openai-endpoint",
                messages=[
                    {"role": "user", "content": "Hello my name is ishaan@berri.ai"}
                ],
            )
            pytest.fail("Should have thrown an exception")
        except Exception as e:
            # Print for debuggability in CI logs, then pin the guardrail error.
            print(e)
            assert "Aporia detected and blocked PII" in str(e)
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue