(feat) - allow building litellm proxy from pip package (#7633)

* fix working build from pip

* add tests for proxy_build_from_pip_tests

* doc clean up for deployment

* docs cleanup

* docs build from pip

* fix cd docker/build_from_pip
This commit is contained in:
Ishaan Jaff 2025-01-08 16:36:57 -08:00 committed by GitHub
parent 43566e9842
commit fd0a03f719
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
7 changed files with 248 additions and 49 deletions

View file

@@ -0,0 +1,23 @@
# syntax=docker/dockerfile:1
# Build the LiteLLM proxy from the published pip package (no source checkout).
# NOTE(review): latest-dev is a moving tag — pin by digest for reproducible builds.
FROM cgr.dev/chainguard/python:latest-dev
USER root
WORKDIR /app
ENV HOME=/home/litellm
# Put the venv's bin dir first on PATH so the `litellm` entrypoint resolves.
ENV PATH="${HOME}/venv/bin:$PATH"
# Build/runtime dependencies for packages with native extensions.
# `apk add --no-cache` already fetches a fresh index, so no separate
# `apk update` layer is needed (and none of its index files are baked in).
RUN apk add --no-cache gcc openssl openssl-dev python3-dev
# Create the virtualenv and upgrade pip in one conceptual step / one layer.
RUN python -m venv ${HOME}/venv && \
    ${HOME}/venv/bin/pip install --no-cache-dir --upgrade pip
# Copy only the dependency manifest first so this layer stays cached until
# requirements.txt changes.
COPY requirements.txt .
# Cache mount keeps pip's download cache on the build host, not in the image.
RUN --mount=type=cache,target=${HOME}/.cache/pip \
    ${HOME}/venv/bin/pip install -r requirements.txt
# Drop root for runtime: 65532 is the conventional "nonroot" UID/GID shipped
# with Chainguard images; hand it the venv and home dir.
RUN chown -R 65532:65532 ${HOME}
USER 65532
EXPOSE 4000/tcp
ENTRYPOINT ["litellm"]
CMD ["--port", "4000"]

View file

@@ -0,0 +1,9 @@
# Docker to build LiteLLM Proxy from litellm pip package
### When to use this?
If you need to build LiteLLM Proxy from litellm pip package, you can use this Dockerfile as a reference.
### Why build from the pip package?
- If your company has strict requirements around security or image building, you can follow the steps outlined here.

View file

@@ -0,0 +1,9 @@
# LiteLLM proxy configuration used by the build-from-pip Docker image.
# (Indentation reconstructed: the list item and its litellm_params must be
# nested under model_list for the YAML to be valid.)
model_list:
  # A fake OpenAI-compatible endpoint for smoke-testing the built image;
  # replace with real providers and keys for actual deployments.
  - model_name: "gpt-4"
    litellm_params:
      model: openai/fake
      api_key: fake-key
      api_base: https://exampleopenaiendpoint-production.up.railway.app/

general_settings:
  # Route alerts to Slack — presumably requires a Slack webhook configured
  # via environment at runtime; confirm against LiteLLM alerting docs.
  alerting: ["slack"]

View file

@@ -0,0 +1,4 @@
# Python dependencies for running the LiteLLM proxy installed from PyPI.
litellm[proxy]==1.57.3 # Specify the litellm version you want to use
# NOTE(review): the packages below are unpinned — pin exact versions for
# reproducible builds.
prometheus_client  # Prometheus client library (metrics export)
langfuse  # Langfuse SDK — presumably for logging/observability callbacks
prisma  # Prisma client — presumably for the proxy's database features; confirm