From 5a39a3554d1fd5295228b79ecf819ec105c9125a Mon Sep 17 00:00:00 2001
From: ishaan-jaff
Date: Fri, 13 Oct 2023 08:45:17 -0700
Subject: [PATCH] (docs) litellm ollama docker image

---
 .../litellm-ollama-docker-image/Dockerfile    | 25 +++++++++++++
 .../requirements.txt                          |  1 +
 cookbook/litellm-ollama-docker-image/start.sh |  2 ++
 cookbook/litellm-ollama-docker-image/test.py  | 36 +++++++++++++++++++
 4 files changed, 64 insertions(+)
 create mode 100644 cookbook/litellm-ollama-docker-image/Dockerfile
 create mode 100644 cookbook/litellm-ollama-docker-image/requirements.txt
 create mode 100644 cookbook/litellm-ollama-docker-image/start.sh
 create mode 100644 cookbook/litellm-ollama-docker-image/test.py

diff --git a/cookbook/litellm-ollama-docker-image/Dockerfile b/cookbook/litellm-ollama-docker-image/Dockerfile
new file mode 100644
index 000000000..be237a4df
--- /dev/null
+++ b/cookbook/litellm-ollama-docker-image/Dockerfile
@@ -0,0 +1,25 @@
+FROM ollama/ollama as ollama
+
+RUN echo "auto installing llama2"
+
+# auto install ollama/llama2
+RUN ollama serve & sleep 2 && ollama pull llama2
+
+RUN echo "installing litellm"
+
+RUN apt-get update
+
+# Install Python
+RUN apt-get install -y python3 python3-pip
+
+# Set the working directory in the container
+WORKDIR /app
+
+# Copy the current directory contents into the container at /app
+COPY . /app
+
+# Install any needed packages specified in requirements.txt
+
+RUN python3 -m pip install litellm
+COPY start.sh /start.sh
+ENTRYPOINT [ "/bin/bash", "/start.sh" ]
diff --git a/cookbook/litellm-ollama-docker-image/requirements.txt b/cookbook/litellm-ollama-docker-image/requirements.txt
new file mode 100644
index 000000000..0cd6312fb
--- /dev/null
+++ b/cookbook/litellm-ollama-docker-image/requirements.txt
@@ -0,0 +1 @@
+litellm
\ No newline at end of file
diff --git a/cookbook/litellm-ollama-docker-image/start.sh b/cookbook/litellm-ollama-docker-image/start.sh
new file mode 100644
index 000000000..ecc03ce73
--- /dev/null
+++ b/cookbook/litellm-ollama-docker-image/start.sh
@@ -0,0 +1,2 @@
+ollama serve &
+litellm
\ No newline at end of file
diff --git a/cookbook/litellm-ollama-docker-image/test.py b/cookbook/litellm-ollama-docker-image/test.py
new file mode 100644
index 000000000..d3fb04f16
--- /dev/null
+++ b/cookbook/litellm-ollama-docker-image/test.py
@@ -0,0 +1,36 @@
+
+import openai
+
+api_base = f"http://0.0.0.0:8000"
+
+openai.api_base = api_base
+openai.api_key = "temp-key"
+print(openai.api_base)
+
+
+print(f'LiteLLM: response from proxy with streaming')
+response = openai.ChatCompletion.create(
+    model="ollama/llama2",
+    messages = [
+        {
+            "role": "user",
+            "content": "this is a test request, acknowledge that you got it"
+        }
+    ],
+    stream=True
+)
+
+for chunk in response:
+    print(f'LiteLLM: streaming response from proxy {chunk}')
+
+response = openai.ChatCompletion.create(
+    model="ollama/llama2",
+    messages = [
+        {
+            "role": "user",
+            "content": "this is a test request, acknowledge that you got it"
+        }
+    ]
+)
+
+print(f'LiteLLM: response from proxy {response}')