diff --git a/docs/my-website/docs/providers/anthropic.md b/docs/my-website/docs/providers/anthropic.md
index 0520e4ef80..85628e8f73 100644
--- a/docs/my-website/docs/providers/anthropic.md
+++ b/docs/my-website/docs/providers/anthropic.md
@@ -270,7 +270,54 @@ response = await litellm.acompletion(
```
-
+
+
+```python
+import openai
+client = openai.AsyncOpenAI(
+    api_key="anything",             # litellm proxy api key
+    base_url="http://0.0.0.0:4000"  # litellm proxy base url
+)
+
+response = await client.chat.completions.create(
+    model="anthropic/claude-3-5-sonnet-20240620",
+    messages=[
+        {
+            "role": "system",
+            "content": [
+                {
+                    "type": "text",
+                    "text": "You are an AI assistant tasked with analyzing legal documents.",
+                },
+                {
+                    "type": "text",
+                    "text": "Here is the full text of a complex legal agreement",
+                    "cache_control": {"type": "ephemeral"},
+                },
+            ],
+        },
+        {
+            "role": "user",
+            "content": "What are the key terms and conditions in this agreement?",
+        },
+    ],
+    extra_headers={
+        "anthropic-version": "2023-06-01",
+        "anthropic-beta": "prompt-caching-2024-07-31",
+    },
+)
+```
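+
+To verify that the cache is actually being written and read, check the token counters on the response usage. A minimal sketch, assuming the proxy passes Anthropic's `cache_creation_input_tokens` / `cache_read_input_tokens` counters through in `usage` (availability may vary by LiteLLM version):
+
+```python
+# Sketch (not part of the example above): inspect cache counters on the response.
+# A cold request should report cache_creation_input_tokens > 0; repeating the
+# same request within the cache TTL should report cache_read_input_tokens > 0.
+usage = response.usage.model_dump()
+print(usage.get("cache_creation_input_tokens"))
+print(usage.get("cache_read_input_tokens"))
+```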
@@ -318,7 +365,55 @@ response = await litellm.acompletion(
)
```
-
+
+
+```python
+import openai
+client = openai.AsyncOpenAI(
+    api_key="anything",             # litellm proxy api key
+    base_url="http://0.0.0.0:4000"  # litellm proxy base url
+)
+
+response = await client.chat.completions.create(
+    model="anthropic/claude-3-5-sonnet-20240620",
+    messages=[{"role": "user", "content": "What's the weather like in Boston today?"}],
+    tools=[
+        {
+            "type": "function",
+            "function": {
+                "name": "get_current_weather",
+                "description": "Get the current weather in a given location",
+                "parameters": {
+                    "type": "object",
+                    "properties": {
+                        "location": {
+                            "type": "string",
+                            "description": "The city and state, e.g. San Francisco, CA",
+                        },
+                        "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]},
+                    },
+                    "required": ["location"],
+                },
+                "cache_control": {"type": "ephemeral"},
+            },
+        }
+    ],
+    extra_headers={
+        "anthropic-version": "2023-06-01",
+        "anthropic-beta": "prompt-caching-2024-07-31",
+    },
+)
+```
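+
+The proxy returns an OpenAI-format response, so the cached tool definitions are used transparently. As a quick sketch (not from the original example), this is how you could read the tool call back out, assuming the model chose to call the tool:
+
+```python
+# Tool calls come back under choices[0].message.tool_calls in OpenAI format
+# (None if the model answered in plain text instead).
+tool_call = response.choices[0].message.tool_calls[0]
+print(tool_call.function.name)       # e.g. "get_current_weather"
+print(tool_call.function.arguments)  # a JSON string such as '{"location": "Boston, MA"}'
+```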
@@ -387,7 +482,88 @@ response = await litellm.acompletion(
)
```
-
+
+
+```python
+import openai
+client = openai.AsyncOpenAI(
+    api_key="anything",             # litellm proxy api key
+    base_url="http://0.0.0.0:4000"  # litellm proxy base url
+)
+
+response = await client.chat.completions.create(
+    model="anthropic/claude-3-5-sonnet-20240620",
+    messages=[
+        # System message: the long document is marked with cache_control so it
+        # is written to the cache on the first request.
+        {
+            "role": "system",
+            "content": [
+                {
+                    "type": "text",
+                    "text": "Here is the full text of a complex legal agreement" * 400,
+                    "cache_control": {"type": "ephemeral"},
+                }
+            ],
+        },
+        # This turn is marked for caching with the cache_control parameter, so
+        # that this checkpoint can read from the previous cache.
+        {
+            "role": "user",
+            "content": [
+                {
+                    "type": "text",
+                    "text": "What are the key terms and conditions in this agreement?",
+                    "cache_control": {"type": "ephemeral"},
+                }
+            ],
+        },
+        {
+            "role": "assistant",
+            "content": "Certainly! The key terms and conditions are the following: the contract is 1 year long for $10/mo",
+        },
+        # The final turn is marked with cache_control, for continuing in follow-ups.
+        {
+            "role": "user",
+            "content": [
+                {
+                    "type": "text",
+                    "text": "What are the key terms and conditions in this agreement?",
+                    "cache_control": {"type": "ephemeral"},
+                }
+            ],
+        },
+    ],
+    extra_headers={
+        "anthropic-version": "2023-06-01",
+        "anthropic-beta": "prompt-caching-2024-07-31",
+    },
+)
+```
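+
+To keep reading from the cache on later turns, each follow-up request should carry a `cache_control` marker on its newest user turn. A hypothetical continuation (not in the original example), assuming the message list from the request above is kept in a `messages` variable:
+
+```python
+# Append the assistant's reply, then ask a follow-up with cache_control on the
+# newest user turn so this request can read from the checkpoint created above.
+messages.append({"role": "assistant", "content": response.choices[0].message.content})
+messages.append({
+    "role": "user",
+    "content": [{
+        "type": "text",
+        "text": "Summarize the termination clause.",  # hypothetical follow-up question
+        "cache_control": {"type": "ephemeral"},
+    }],
+})
+followup = await client.chat.completions.create(
+    model="anthropic/claude-3-5-sonnet-20240620",
+    messages=messages,
+    extra_headers={
+        "anthropic-version": "2023-06-01",
+        "anthropic-beta": "prompt-caching-2024-07-31",
+    },
+)
+```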