(docs improvement) remove emojis, use guides section, categorize uncategorized docs (#6796)

* proxy - use Setup & Deployment category

* fix emoji

* use Guides section for user-facing usage docs

* docs - remove emojis

* use 1 quick start
This commit is contained in:
Ishaan Jaff 2024-11-18 12:23:54 -08:00 committed by GitHub
parent f43768d617
commit bbdec2995a
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
5 changed files with 75 additions and 26 deletions

View file

@@ -2,7 +2,7 @@ import Tabs from '@theme/Tabs';
import TabItem from '@theme/TabItem';
import Image from '@theme/IdealImage';
# 🐳 Docker, Deployment
# Docker, Deployment
You can find the Dockerfile to build litellm proxy [here](https://github.com/BerriAI/litellm/blob/main/Dockerfile)

View file

@@ -1,3 +1,7 @@
import Tabs from '@theme/Tabs';
import TabItem from '@theme/TabItem';
# Getting Started - E2E Tutorial
End-to-End tutorial for LiteLLM Proxy to:
@@ -9,7 +13,11 @@ End-to-End tutorial for LiteLLM Proxy to:
## Pre-Requisites
- Install LiteLLM Docker Image
- Install LiteLLM Docker Image **OR** LiteLLM CLI (pip package)
<Tabs>
<TabItem value="docker" label="Docker">
```
docker pull ghcr.io/berriai/litellm:main-latest
@@ -17,6 +25,18 @@ docker pull ghcr.io/berriai/litellm:main-latest
[**See all docker images**](https://github.com/orgs/BerriAI/packages)
</TabItem>
<TabItem value="pip" label="LiteLLM CLI (pip package)">
```shell
$ pip install 'litellm[proxy]'
```
</TabItem>
</Tabs>
## 1. Add a model
Control LiteLLM Proxy with a config.yaml file.
@@ -58,6 +78,11 @@ LiteLLM Proxy is 100% OpenAI-compatible. Test your azure model via the `/chat/co
Save your config.yaml from step 1. as `litellm_config.yaml`.
<Tabs>
<TabItem value="docker" label="Docker">
```bash
docker run \
-v $(pwd)/litellm_config.yaml:/app/config.yaml \
@@ -70,6 +95,20 @@ docker run \
# RUNNING on http://0.0.0.0:4000
```
</TabItem>
<TabItem value="pip" label="LiteLLM CLI (pip package)">
```shell
$ litellm --config /app/config.yaml --detailed_debug
```
</TabItem>
</Tabs>
Confirm your config.yaml got mounted correctly
```bash

View file

@@ -1,5 +1,5 @@
# IP Address Filtering
# IP Address Filtering
:::info

View file

@@ -1,7 +1,7 @@
import Tabs from '@theme/Tabs';
import TabItem from '@theme/TabItem';
# 🔑 Virtual Keys
# Virtual Keys
Track Spend, and control model access via virtual keys for the proxy
:::info

View file

@@ -20,19 +20,29 @@ const sidebars = {
{ type: "doc", id: "index" }, // NEW
{
type: "category",
label: "💥 LiteLLM Proxy Server",
label: "LiteLLM Proxy Server",
link: {
type: "generated-index",
title: "💥 LiteLLM Proxy Server (LLM Gateway)",
title: "LiteLLM Proxy Server (LLM Gateway)",
description: `OpenAI Proxy Server (LLM Gateway) to call 100+ LLMs in a unified interface & track spend, set budgets per virtual key/user`,
slug: "/simple_proxy",
},
items: [
"proxy/quick_start",
"proxy/docker_quick_start",
"proxy/deploy",
"proxy/docker_quick_start",
{
type: "category",
label: "Setup & Deployment",
items: [
"proxy/deploy",
"proxy/prod",
"proxy/configs",
"proxy/cli",
"proxy/model_management",
"proxy/health",
"proxy/debugging",
],
},
"proxy/demo",
"proxy/prod",
{
type: "category",
label: "Architecture",
@@ -45,17 +55,24 @@ const sidebars = {
},
"proxy/enterprise",
"proxy/user_keys",
"proxy/configs",
"proxy/response_headers",
"proxy/reliability",
{
type: "category",
label: "🔑 Authentication",
items: ["proxy/virtual_keys", "proxy/token_auth", "proxy/service_accounts", "proxy/access_control","proxy/ip_address"],
label: "Authentication",
items: [
"proxy/virtual_keys",
"proxy/token_auth",
"proxy/service_accounts",
"proxy/access_control",
"proxy/ip_address",
"proxy/email",
"proxy/multiple_admins",
],
},
{
type: "category",
label: "💸 Spend Tracking + Budgets",
label: "Spend Tracking + Budgets",
items: ["proxy/cost_tracking", "proxy/users", "proxy/custom_pricing", "proxy/team_budgets", "proxy/billing", "proxy/customers"],
},
{
@@ -91,7 +108,7 @@ const sidebars = {
},
{
type: "category",
label: "🛡️ [Beta] Guardrails",
label: "[Beta] Guardrails",
items: [
"proxy/guardrails/quick_start",
"proxy/guardrails/aporia_api",
@@ -114,19 +131,13 @@ const sidebars = {
},
"proxy/caching",
"proxy/pass_through",
"proxy/email",
"proxy/multiple_admins",
"proxy/model_management",
"proxy/health",
"proxy/debugging",
"proxy/call_hooks",
"proxy/rules",
"proxy/cli",
"proxy/rules",
]
},
{
type: "category",
label: "💯 Supported Models & Providers",
label: "Supported Models & Providers",
link: {
type: "generated-index",
title: "Providers",
@@ -183,7 +194,6 @@ const sidebars = {
"providers/openrouter",
"providers/palm",
"providers/sambanova",
// "providers/custom_openai_proxy",
"providers/custom_llm_server",
"providers/petals",
@@ -191,7 +201,7 @@ const sidebars = {
},
{
type: "category",
label: "Chat Completions (litellm.completion + PROXY)",
label: "Guides",
link: {
type: "generated-index",
title: "Chat Completions",
@@ -245,7 +255,7 @@ const sidebars = {
"scheduler",
{
type: "category",
label: "🚅 LiteLLM Python SDK",
label: "LiteLLM Python SDK",
items: [
"set_keys",
"completion/token_usage",