From 225d0fb703a606c47d1ba23f764a3e6e88a94e0d Mon Sep 17 00:00:00 2001
From: ishaan-jaff
Date: Thu, 14 Sep 2023 13:46:12 -0700
Subject: [PATCH] add usage example to sagemaker docs

---
 docs/my-website/docs/providers/aws_sagemaker.md | 17 +++++++++++++++++
 1 file changed, 17 insertions(+)

diff --git a/docs/my-website/docs/providers/aws_sagemaker.md b/docs/my-website/docs/providers/aws_sagemaker.md
index 485601e3d..4c9c4f1ec 100644
--- a/docs/my-website/docs/providers/aws_sagemaker.md
+++ b/docs/my-website/docs/providers/aws_sagemaker.md
@@ -10,6 +10,23 @@ os.environ["AWS_SECRET_ACCESS_KEY"] = ""
 os.environ["AWS_REGION_NAME"] = ""
 ```
 
+### Usage
+```python
+import os
+from litellm import completion
+
+os.environ["AWS_ACCESS_KEY_ID"] = ""
+os.environ["AWS_SECRET_ACCESS_KEY"] = ""
+os.environ["AWS_REGION_NAME"] = ""
+
+response = completion(
+    model="sagemaker/jumpstart-dft-meta-textgeneration-llama-2-7b",
+    messages=[{ "content": "Hello, how are you?","role": "user"}],
+    temperature=0.2,
+    max_tokens=80
+)
+```
+
 ### AWS Sagemaker Models
 
 Here's an example of using a sagemaker model with LiteLLM