(feat) Support Dynamic Params for guardrails (#7415)

* update CustomGuardrail

* unit test custom guardrails

* add dynamic params for aporia

* add dynamic params to bedrock guard

* add dynamic params for all guardrails

* fix linting

* fix should_run_guardrail

* _validate_premium_user

* update guardrail doc

* doc update

* update code q

* should_run_guardrail
Ishaan Jaff, 2024-12-25 16:07:29 -08:00 (committed by GitHub)
parent 77fa751639
commit 0ce5f9fe58
10 changed files with 411 additions and 21 deletions
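The change threads the incoming request_data through each guardrail so that per-request ("dynamic") parameters can be merged into the guardrail's API payload via CustomGuardrail.get_guardrail_dynamic_request_body_params, as the GuardrailsAI diff below shows. A minimal sketch of how a custom guardrail might consume this; "MyGuardrail", the payload keys, and the external endpoint are illustrative assumptions, not part of this commit:

# Sketch only: "MyGuardrail" and the payload shape are assumptions for illustration.
from litellm.integrations.custom_guardrail import CustomGuardrail


class MyGuardrail(CustomGuardrail):
    async def async_pre_call_hook(self, user_api_key_dict, cache, data, call_type):
        # Base payload built from the incoming request ...
        payload = {"input": str(data.get("messages", []))}
        # ... plus any per-request params the caller attached for this guardrail.
        payload.update(
            self.get_guardrail_dynamic_request_body_params(request_data=data)
        )
        # POST `payload` to the external guardrail service here (omitted).
        return data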


@@ -48,10 +48,13 @@ class GuardrailsAI(CustomGuardrail):
         supported_event_hooks = [GuardrailEventHooks.post_call]
         super().__init__(supported_event_hooks=supported_event_hooks, **kwargs)
 
-    async def make_guardrails_ai_api_request(self, llm_output: str):
+    async def make_guardrails_ai_api_request(self, llm_output: str, request_data: dict):
         from httpx import URL
 
-        data = {"llmOutput": llm_output}
+        data = {
+            "llmOutput": llm_output,
+            **self.get_guardrail_dynamic_request_body_params(request_data=request_data),
+        }
         _json_data = json.dumps(data)
         response = await litellm.module_level_aclient.post(
             url=str(
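The ** unpacking above simply merges the per-request dict returned by get_guardrail_dynamic_request_body_params into the base Guardrails AI payload. A standalone illustration of that merge ("reask" is a made-up dynamic param):

base = {"llmOutput": "model output"}
dynamic = {"reask": True}  # hypothetical per-request param
payload = {**base, **dynamic}
print(payload)  # {'llmOutput': 'model output', 'reask': True}; on a key clash the dynamic value wins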
@@ -96,7 +99,9 @@ class GuardrailsAI(CustomGuardrail):
         response_str: str = get_content_from_model_response(response)
         if response_str is not None and len(response_str) > 0:
-            await self.make_guardrails_ai_api_request(llm_output=response_str)
+            await self.make_guardrails_ai_api_request(
+                llm_output=response_str, request_data=data
+            )
 
         add_guardrail_to_applied_guardrails_header(
             request_data=data, guardrail_name=self.guardrail_name
         )
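On the caller side, the dynamic values travel with the normal /chat/completions request and are resolved server-side by get_guardrail_dynamic_request_body_params. The sketch below uses the OpenAI Python client against a LiteLLM proxy; the guardrail name and the nested "extra_body" shape are assumptions here — the authoritative request format is in the guardrail doc updated by this commit:

# Sketch only: "guardrails_ai-guard" and the nested "extra_body" shape are assumptions;
# see the LiteLLM guardrails docs updated in this commit for the exact request format.
from openai import OpenAI

client = OpenAI(base_url="http://0.0.0.0:4000", api_key="sk-1234")

response = client.chat.completions.create(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "hello"}],
    extra_body={
        "guardrails": [
            {"guardrails_ai-guard": {"extra_body": {"reask": True}}}  # hypothetical dynamic param
        ]
    },
)
print(response.choices[0].message.content)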