From 1c2883a9281d93e76c8e0acf13cead7806b29bbf Mon Sep 17 00:00:00 2001
From: Krrish Dholakia
Date: Mon, 23 Oct 2023 07:56:55 -0700
Subject: [PATCH] docs(simple_proxy.md): adding docs on simple deployed proxy
 server

---
 docs/my-website/docs/proxy_server.md |  2 +-
 docs/my-website/docs/simple_proxy.md | 55 ++++++++++++++++++++++++++++
 docs/my-website/sidebars.js          |  3 +-
 3 files changed, 58 insertions(+), 2 deletions(-)
 create mode 100644 docs/my-website/docs/simple_proxy.md

diff --git a/docs/my-website/docs/proxy_server.md b/docs/my-website/docs/proxy_server.md
index 36c4f60ce..1f17a9331 100644
--- a/docs/my-website/docs/proxy_server.md
+++ b/docs/my-website/docs/proxy_server.md
@@ -1,7 +1,7 @@
 import Tabs from '@theme/Tabs';
 import TabItem from '@theme/TabItem';
 
-# OpenAI Proxy Server
+# Local OpenAI Proxy Server
 
 A fast, and lightweight OpenAI-compatible server to call 100+ LLM APIs.
 
diff --git a/docs/my-website/docs/simple_proxy.md b/docs/my-website/docs/simple_proxy.md
new file mode 100644
index 000000000..2f7a34d91
--- /dev/null
+++ b/docs/my-website/docs/simple_proxy.md
@@ -0,0 +1,55 @@
+# OpenAI Proxy Server
+A simple, fast, and lightweight **OpenAI-compatible server** to call 100+ LLM APIs.
+
+:::info
+We want to learn how we can make the proxy better! Meet the [founders](https://calendly.com/d/4mp-gd3-k5k/berriai-1-1-onboarding-litellm-hosted-version) or
+join our [discord](https://discord.gg/wuPM9dRgDw)
+:::
+
+
+## Usage
+
+```shell
+$ git clone https://github.com/BerriAI/litellm.git
+```
+```shell
+$ cd ./litellm/openai-proxy
+```
+
+```shell
+$ uvicorn main:app --host 0.0.0.0 --port 8000
+```
+
+## Replace OpenAI base
+```python
+import openai
+openai.api_base = "http://0.0.0.0:8000"
+
+# call cohere
+openai.api_key = "my-cohere-key" # this gets passed as a header
+
+response = openai.ChatCompletion.create(model="command-nightly", messages=[{"role":"user", "content":"Hey!"}])
+
+# call bedrock
+response = openai.ChatCompletion.create(
+    model = "bedrock/anthropic.claude-instant-v1",
+    messages = [
+        {
+            "role": "user",
+            "content": "Hey!"
+        }
+    ],
+    aws_access_key_id="",
+    aws_secret_access_key="",
+    aws_region_name="us-west-2",
+)
+
+print(response)
+```
+
+[**See how to call Huggingface, Bedrock, TogetherAI, Anthropic, etc.**](https://docs.litellm.ai/docs/proxy_server)
+
+
+:::info
+Looking for the CLI tool/local proxy? It's [here](./proxy_server.md)
+:::
diff --git a/docs/my-website/sidebars.js b/docs/my-website/sidebars.js
index d8903b7ca..d518505eb 100644
--- a/docs/my-website/sidebars.js
+++ b/docs/my-website/sidebars.js
@@ -83,8 +83,8 @@ const sidebars = {
         "providers/petals",
       ]
     },
-    "proxy_server",
     "budget_manager",
+    "simple_proxy",
    "routing",
    "set_keys",
    "completion/token_usage",
@@ -156,6 +156,7 @@ const sidebars = {
       label: 'Extras',
       items: [
         'extras/contributing',
+        "proxy_server",
         {
           type: "category",
           label: "❤️ 🚅 Projects built on LiteLLM",