From 3cc23048d51a780dd7af2f7cb2375ff10132d9df Mon Sep 17 00:00:00 2001
From: Krrish Dholakia
Date: Wed, 11 Oct 2023 08:27:25 -0700
Subject: [PATCH] docs(proxy_server.md): add docker image details to docs

---
 docs/my-website/docs/proxy_server.md | 14 +++++++++++++-
 litellm/proxy/proxy_cli.py           | 16 ++++++++--------
 2 files changed, 21 insertions(+), 9 deletions(-)

diff --git a/docs/my-website/docs/proxy_server.md b/docs/my-website/docs/proxy_server.md
index 51d44e4786..ad70a3ae85 100644
--- a/docs/my-website/docs/proxy_server.md
+++ b/docs/my-website/docs/proxy_server.md
@@ -240,7 +240,7 @@ task = Task(agent, name="my-llm-task")
 task.run()
 ```
 
-Credits [@pchalasani](https://github.com/pchalasani) for this tutorial.
+Credits [@pchalasani](https://github.com/pchalasani) and [Langroid](https://github.com/langroid/langroid) for this tutorial.
 
 
 
@@ -323,6 +323,18 @@ This will return your logs from `~/.ollama/logs/server.log`.
 
 ### Deploy Proxy
 
+
+Use this to deploy local models with Ollama that's OpenAI-compatible.
+
+It works for models like Mistral, Llama2, CodeLlama, etc. (any model supported by [Ollama](https://ollama.ai/library))
+
+**usage**
+```shell
+docker run --name ollama litellm/ollama
+```
+
+More details 👉 https://hub.docker.com/r/litellm/ollama
+
 **Step 1: Clone the repo**
 
diff --git a/litellm/proxy/proxy_cli.py b/litellm/proxy/proxy_cli.py
index fe188b3859..67f39fbf63 100644
--- a/litellm/proxy/proxy_cli.py
+++ b/litellm/proxy/proxy_cli.py
@@ -7,14 +7,14 @@ load_dotenv()
 from importlib import resources
 import shutil, random
 list_of_messages = [
-    "The thing I wish you improved is...:",
-    "A feature I really want is...:",
-    "The worst thing about this product is...:",
-    "This product would be better if...:",
-    "I don't like how this works...:",
-    "It would help me if you could add...:",
-    "This feature doesn't meet my needs because...:",
-    "I get frustrated when the product...:",
+    "'The thing I wish you improved is...'",
+    "'A feature I really want is...'",
+    "'The worst thing about this product is...'",
+    "'This product would be better if...'",
+    "'I don't like how this works...'",
+    "'It would help me if you could add...'",
+    "'This feature doesn't meet my needs because...'",
+    "'I get frustrated when the product...'",
 ]
 
 def generate_feedback_box():