From b2b505611c5a7fac873bf807574c266153df4187 Mon Sep 17 00:00:00 2001
From: Ishaan Jaff
Date: Thu, 8 Aug 2024 17:00:52 -0700
Subject: [PATCH] docs use (LLM Gateway) in some places

---
 docs/my-website/docs/index.md             | 2 +-
 docs/my-website/docs/proxy/quick_start.md | 2 +-
 docs/my-website/sidebars.js               | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/docs/my-website/docs/index.md b/docs/my-website/docs/index.md
index 2417b0cff..4c1cdd017 100644
--- a/docs/my-website/docs/index.md
+++ b/docs/my-website/docs/index.md
@@ -14,7 +14,7 @@ https://github.com/BerriAI/litellm
 ## How to use LiteLLM
 
 You can use litellm through either:
-1. [LiteLLM Proxy Server](#openai-proxy) - Server to call 100+ LLMs, load balance, cost tracking across projects
+1. [LiteLLM Proxy Server](#openai-proxy) - Server (LLM Gateway) to call 100+ LLMs, load balance, cost tracking across projects
 2. [LiteLLM python SDK](#basic-usage) - Python Client to call 100+ LLMs, load balance, cost tracking
 
 ### When to use LiteLLM Proxy Server
diff --git a/docs/my-website/docs/proxy/quick_start.md b/docs/my-website/docs/proxy/quick_start.md
index 9da860b0d..2049f6bb2 100644
--- a/docs/my-website/docs/proxy/quick_start.md
+++ b/docs/my-website/docs/proxy/quick_start.md
@@ -5,7 +5,7 @@ import TabItem from '@theme/TabItem';
 # Quick Start
 Quick start CLI, Config, Docker
 
-LiteLLM Server manages:
+LiteLLM Server (LLM Gateway) manages:
 
 * **Unified Interface**: Calling 100+ LLMs [Huggingface/Bedrock/TogetherAI/etc.](#other-supported-models) in the OpenAI `ChatCompletions` & `Completions` format
 * **Cost tracking**: Authentication, Spend Tracking & Budgets [Virtual Keys](https://docs.litellm.ai/docs/proxy/virtual_keys)
diff --git a/docs/my-website/sidebars.js b/docs/my-website/sidebars.js
index f84a43c56..58eebdc2c 100644
--- a/docs/my-website/sidebars.js
+++ b/docs/my-website/sidebars.js
@@ -24,7 +24,7 @@ const sidebars = {
       link: {
         type: "generated-index",
         title: "💥 LiteLLM Proxy Server",
-        description: `OpenAI Proxy Server to call 100+ LLMs in a unified interface & track spend, set budgets per virtual key/user`,
+        description: `OpenAI Proxy Server (LLM Gateway) to call 100+ LLMs in a unified interface & track spend, set budgets per virtual key/user`,
         slug: "/simple_proxy",
       },
       items: [
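
For context on the interface these doc edits describe, below is a minimal sketch of calling a LiteLLM Proxy Server (LLM Gateway) through the OpenAI Python client in the `ChatCompletions` format. The base URL, port, model name, and virtual key are illustrative assumptions and are not part of this patch.

```python
# Minimal sketch: calling a LiteLLM Proxy Server (LLM Gateway) with the OpenAI client.
# Assumes the gateway is running locally on port 4000 and that "sk-1234" is a virtual
# key configured on the proxy -- both are illustrative values, not taken from this patch.
from openai import OpenAI

client = OpenAI(
    base_url="http://0.0.0.0:4000",  # point the client at the gateway instead of api.openai.com
    api_key="sk-1234",               # LiteLLM virtual key (used for spend tracking / budgets)
)

response = client.chat.completions.create(
    model="gpt-3.5-turbo",  # any model name routed by the proxy's config
    messages=[{"role": "user", "content": "Hello from the LiteLLM gateway"}],
)
print(response.choices[0].message.content)
```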