From eb6a0a32f1db205c77f229ea54a6a92ad738aeb3 Mon Sep 17 00:00:00 2001
From: Krrish Dholakia <krrishdholakia@gmail.com>
Date: Wed, 14 Aug 2024 22:11:19 -0700
Subject: [PATCH] docs(bedrock.md): add guardrails on config.yaml to docs

---
 docs/my-website/docs/providers/bedrock.md | 51 ++++++++++++++++++++++-
 1 file changed, 50 insertions(+), 1 deletion(-)

diff --git a/docs/my-website/docs/providers/bedrock.md b/docs/my-website/docs/providers/bedrock.md
index 485dbf892..907dfc233 100644
--- a/docs/my-website/docs/providers/bedrock.md
+++ b/docs/my-website/docs/providers/bedrock.md
@@ -393,7 +393,7 @@ response = completion(
 )
 ```
 
-<TabItem value="proxy" label="LiteLLM Proxy">
+<TabItem value="proxy" label="Proxy on request">
 
 ```python
 
@@ -420,6 +420,55 @@ extra_body={
 }
 )
 
+print(response)
+```
+</TabItem>
+
+<TabItem value="proxy-config" label="Proxy on config.yaml">
+
+1. Update config.yaml
+
+```yaml
+model_list:
+  - model_name: bedrock-claude-v1
+    litellm_params:
+      model: bedrock/anthropic.claude-instant-v1
+      aws_access_key_id: os.environ/CUSTOM_AWS_ACCESS_KEY_ID
+      aws_secret_access_key: os.environ/CUSTOM_AWS_SECRET_ACCESS_KEY
+      aws_region_name: os.environ/CUSTOM_AWS_REGION_NAME
+      guardrailConfig: {
+        "guardrailIdentifier": "ff6ujrregl1q", # The identifier (ID) for the guardrail.
+        "guardrailVersion": "DRAFT", # The version of the guardrail.
+        "trace": "disabled", # The trace behavior for the guardrail. Can either be "disabled" or "enabled"
+      }
+
+```
+
+2. Start proxy
+
+```bash
+litellm --config /path/to/config.yaml
+```
+
+3. Test it!
+
+```python
+
+import openai
+client = openai.OpenAI(
+    api_key="anything",
+    base_url="http://0.0.0.0:4000"
+)
+
+# request sent to model set on litellm proxy, `litellm --model`
+response = client.chat.completions.create(model="bedrock-claude-v1", messages = [
+    {
+        "role": "user",
+        "content": "this is a test request, write a short poem"
+    }
+],
+temperature=0.7
+)
 
 print(response)
 ```