forked from phoenix/litellm-mirror
add testing for guardrails
This commit is contained in:
parent 6fa421ed8e
commit b2d63f752c
1 changed file with 39 additions and 3 deletions
@@ -6,18 +6,33 @@ from typing import Optional, List, Union
 import uuid
 
 
-async def chat_completion(session, key, messages, model: Union[str, List] = "gpt-4"):
+async def chat_completion(
+    session,
+    key,
+    messages,
+    model: Union[str, List] = "gpt-4",
+    guardrails: Optional[List] = None,
+):
     url = "http://0.0.0.0:4000/chat/completions"
     headers = {
         "Authorization": f"Bearer {key}",
         "Content-Type": "application/json",
     }
 
     data = {
         "model": model,
         "messages": messages,
-        "guardrails": ["aporia-post-guard", "aporia-pre-guard"],
+        "guardrails": [
+            "aporia-post-guard",
+            "aporia-pre-guard",
+        ],  # default guardrails for all tests
     }
 
+    if guardrails is not None:
+        data["guardrails"] = guardrails
+
     print("data=", data)
 
     async with session.post(url, headers=headers, json=data) as response:
         status = response.status
         response_text = await response.text()
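
Note: with the new guardrails parameter, an individual test can override the default aporia-pre-guard / aporia-post-guard pair instead of always inheriting it. Below is a minimal sketch of such an override; the test name and the single-guardrail choice are illustrative and not part of this commit, while the sk-1234 key, fake-openai-endpoint model, and x-litellm-applied-guardrails header are taken from the tests in this diff. The header-present assertion mirrors the docstring of test_llm_guard_triggered_safe_request and is an assumption here, not code from the commit.

@pytest.mark.asyncio
async def test_llm_guard_custom_guardrails_sketch():
    """Illustrative sketch only: send a request with a single guardrail instead of the defaults."""
    async with aiohttp.ClientSession() as session:
        response, headers = await chat_completion(
            session,
            "sk-1234",
            model="fake-openai-endpoint",
            messages=[{"role": "user", "content": "Hello, what's the weather?"}],
            guardrails=["aporia-pre-guard"],  # overrides the default guardrails list in the helper
        )
        # Assumed check: the proxy reports which guardrails ran via this response header.
        assert "x-litellm-applied-guardrails" in headers
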
@@ -36,7 +51,7 @@ async def chat_completion(session, key, messages, model: Union[str, List] = "gp
 
 
 @pytest.mark.asyncio
-async def test_no_llm_guard_triggered():
+async def test_llm_guard_triggered_safe_request():
     """
     - Tests a request where no content mod is triggered
     - Assert that the guardrails applied are returned in the response headers
@@ -80,3 +95,24 @@ async def test_llm_guard_triggered():
     except Exception as e:
         print(e)
         assert "Aporia detected and blocked PII" in str(e)
+
+
+@pytest.mark.asyncio
+async def test_no_llm_guard_triggered():
+    """
+    - Tests a request where no content mod is triggered
+    - Assert that the x-litellm-applied-guardrails header is not returned
+    """
+    async with aiohttp.ClientSession() as session:
+        response, headers = await chat_completion(
+            session,
+            "sk-1234",
+            model="fake-openai-endpoint",
+            messages=[{"role": "user", "content": f"Hello what's the weather"}],
+            guardrails=[],
+        )
+        await asyncio.sleep(3)
+
+        print("response=", response, "response headers", headers)
+
+        assert "x-litellm-applied-guardrails" not in headers
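
Usage note: these tests presumably require a LiteLLM proxy running locally at http://0.0.0.0:4000 with the aporia-pre-guard and aporia-post-guard guardrails configured; sk-1234 appears to be a test API key and fake-openai-endpoint a mock model used by this suite. Passing guardrails=[] to the helper disables the default guardrails for a single request, which is what the new test_no_llm_guard_triggered verifies via the absent x-litellm-applied-guardrails header.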