From 19a4973f4be2341bd4dc9bc652e5061721aa5566 Mon Sep 17 00:00:00 2001 From: Krrish Dholakia Date: Sun, 13 Oct 2024 18:34:09 -0700 Subject: [PATCH] docs: make it easier to find anthropic/openai prompt caching doc --- docs/my-website/docs/caching/all_caches.md | 5 ++++- docs/my-website/docs/proxy/caching.md | 6 ++++++ 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/docs/my-website/docs/caching/all_caches.md b/docs/my-website/docs/caching/all_caches.md index 8b4d7e863d..d6ccb98a2f 100644 --- a/docs/my-website/docs/caching/all_caches.md +++ b/docs/my-website/docs/caching/all_caches.md @@ -7,7 +7,10 @@ import TabItem from '@theme/TabItem'; :::info -Need to use Caching on LiteLLM Proxy Server? Doc here: [Caching Proxy Server](https://docs.litellm.ai/docs/proxy/caching) +- For the LiteLLM Proxy Server? Doc here: [Caching Proxy Server](https://docs.litellm.ai/docs/proxy/caching) + +- For OpenAI/Anthropic Prompt Caching, go [here](../completion/prompt_caching.md) + ::: diff --git a/docs/my-website/docs/proxy/caching.md b/docs/my-website/docs/proxy/caching.md index 4d44a4da02..533d1bd9f9 100644 --- a/docs/my-website/docs/proxy/caching.md +++ b/docs/my-website/docs/proxy/caching.md @@ -4,6 +4,12 @@ import TabItem from '@theme/TabItem'; # Caching Cache LLM Responses +:::note + +For OpenAI/Anthropic Prompt Caching, go [here](../completion/prompt_caching.md) + +::: + LiteLLM supports: - In Memory Cache - Redis Cache