From 918e4fcfe5c3795d0b14d40d483e94d3248aa365 Mon Sep 17 00:00:00 2001
From: Ishaan Jaff
Date: Fri, 23 Aug 2024 12:01:43 -0700
Subject: [PATCH] feat add test for custom guardrails

---
 litellm/proxy/proxy_config.yaml     |  7 ++++---
 tests/otel_tests/test_guardrails.py | 21 +++++++++++++++++++++
 2 files changed, 25 insertions(+), 3 deletions(-)

diff --git a/litellm/proxy/proxy_config.yaml b/litellm/proxy/proxy_config.yaml
index acb792aec..6be2454a2 100644
--- a/litellm/proxy/proxy_config.yaml
+++ b/litellm/proxy/proxy_config.yaml
@@ -1,8 +1,9 @@
 model_list:
-  - model_name: gpt-4
+  - model_name: fake-openai-endpoint
     litellm_params:
-      model: openai/gpt-4o
-      api_key: os.environ/OPENAI_API_KEY
+      model: openai/fake
+      api_key: fake-key
+      api_base: https://exampleopenaiendpoint-production.up.railway.app/
 
 guardrails:
   - guardrail_name: "custom-pre-guard"
diff --git a/tests/otel_tests/test_guardrails.py b/tests/otel_tests/test_guardrails.py
index 34f14186e..2b5bfc644 100644
--- a/tests/otel_tests/test_guardrails.py
+++ b/tests/otel_tests/test_guardrails.py
@@ -217,3 +217,24 @@ async def test_bedrock_guardrail_triggered():
         print(e)
         assert "GUARDRAIL_INTERVENED" in str(e)
         assert "Violated guardrail policy" in str(e)
+
+
+@pytest.mark.asyncio
+async def test_custom_guardrail_during_call_triggered():
+    """
+    - Tests a request where our custom during_call guardrail should be triggered
+    - Assert that the guardrail error is raised and surfaced to the client
+    """
+    async with aiohttp.ClientSession() as session:
+        try:
+            response, headers = await chat_completion(
+                session,
+                "sk-1234",
+                model="fake-openai-endpoint",
+                messages=[{"role": "user", "content": "Hello do you like litellm?"}],
+                guardrails=["custom-during-guard"],
+            )
+            pytest.fail("Should have thrown an exception")
+        except Exception as e:
+            print(e)
+            assert "Guardrail failed words - `litellm` detected" in str(e)
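
Note: the test above exercises a "custom-during-guard" guardrail that is not defined in this patch. Below is a minimal sketch of what that guardrail might look like, assuming litellm's documented CustomGuardrail interface; the class name myCustomGuardrail, the module path, and the exact async_moderation_hook signature are assumptions and not part of this change. It would be registered in proxy_config.yaml under guardrails with mode "during_call" so it runs in parallel with the LLM call.

    # custom_guardrail.py (sketch, not part of this patch)
    from typing import Literal

    from litellm.integrations.custom_guardrail import CustomGuardrail
    from litellm.proxy._types import UserAPIKeyAuth


    class myCustomGuardrail(CustomGuardrail):
        async def async_moderation_hook(
            self,
            data: dict,
            user_api_key_dict: UserAPIKeyAuth,
            call_type: Literal["completion", "embeddings", "image_generation"],
        ):
            # Runs alongside the LLM call ("during_call" mode); reject the
            # request if any message content mentions the word "litellm",
            # matching the error message asserted in the test above.
            for message in data.get("messages", []):
                content = message.get("content")
                if isinstance(content, str) and "litellm" in content.lower():
                    raise ValueError("Guardrail failed words - `litellm` detected")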