From 50b741f8fae27470595b6ba93572d82559e16a9d Mon Sep 17 00:00:00 2001
From: Krrish Dholakia
Date: Sat, 16 Dec 2023 16:01:02 -0800
Subject: [PATCH] fix(Dockerfile): support mac

---
 Dockerfile        | 34 +++++++++++++++++-----------------
 litellm/router.py |  2 +-
 2 files changed, 18 insertions(+), 18 deletions(-)

diff --git a/Dockerfile b/Dockerfile
index 1a59db16a..c9dba6615 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,17 +1,17 @@
-# Base image
+# Base image for building
 ARG LITELLM_BUILD_IMAGE=python:3.9
 
 # Runtime image
 ARG LITELLM_RUNTIME_IMAGE=python:3.9-slim
-# allow users to specify, else use python 3.9
+# Builder stage
 FROM $LITELLM_BUILD_IMAGE as builder
 
 # Set the working directory to /app
 WORKDIR /app
 
 # Install build dependencies
-RUN apt-get update && \
+RUN apt-get clean && apt-get update && \
     apt-get install -y gcc python3-dev && \
     rm -rf /var/lib/apt/lists/*
 
@@ -31,26 +31,26 @@ RUN pip install dist/*.whl
 RUN pip install wheel && \
     pip wheel --no-cache-dir --wheel-dir=/app/wheels -r requirements.txt
 
-###############################################################################
+# Clear out any existing builds and build the package
+RUN rm -rf dist/* && python -m build
+
+# There should be only one wheel file now, assume the build only creates one
+RUN ls -1 dist/*.whl | head -1
+
+# Runtime stage
 FROM $LITELLM_RUNTIME_IMAGE as runtime
 
 WORKDIR /app
 
-# Copy the current directory contents into the container at /app
-COPY . .
+# Depending on wheel naming patterns, use a wildcard if multiple versions are possible
+# Copy the built wheel from the builder stage to the runtime stage; assumes only one wheel file is present
+COPY --from=builder /app/dist/*.whl .
 
-COPY --from=builder /app/wheels /app/wheels
-
-RUN pip install --no-index --find-links=/app/wheels -r requirements.txt
-
-# Trigger the Prisma CLI to be installed
-RUN prisma -v
+# Install the built wheel using pip; again using a wildcard if it's the only file
+RUN pip install *.whl && rm -f *.whl
 
 EXPOSE 4000/tcp
 
-# Start the litellm proxy, using the `litellm` cli command https://docs.litellm.ai/docs/simple_proxy
-# Start the litellm proxy with default options
-CMD ["--port", "4000"]
-
-# Allow users to override the CMD when running the container, allows users to pass litellm args
+# Set your entrypoint and command
 ENTRYPOINT ["litellm"]
+CMD ["--port", "4000"]
\ No newline at end of file
diff --git a/litellm/router.py b/litellm/router.py
index 1b8dc1172..410d4964e 100644
--- a/litellm/router.py
+++ b/litellm/router.py
@@ -964,7 +964,7 @@ class Router:
 
             if "azure" in model_name:
                 if api_base is None:
-                    raise ValueError("api_base is required for Azure OpenAI. Set it on your config")
+                    raise ValueError(f"api_base is required for Azure OpenAI. Set it on your config. Model - {model}")
                 if api_version is None:
                     api_version = "2023-07-01-preview"
                 if "gateway.ai.cloudflare.com" in api_base:
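
---

Note on the router.py hunk: it only enriches the error message so the failing deployment is named. A minimal sketch of that error path, assuming litellm's Router validates Azure deployments at construction time (as the patched code suggests); the alias, model name, and API key below are illustrative placeholders, not values from this patch:

    # Hypothetical deployment config: an Azure model with api_base omitted.
    from litellm import Router

    model_list = [
        {
            "model_name": "gpt-4",  # alias that callers route to
            "litellm_params": {
                "model": "azure/gpt-4",        # Azure model, so api_base is required
                "api_key": "placeholder-key",  # illustrative only
                # "api_base" deliberately omitted to trigger the error
            },
        }
    ]

    try:
        router = Router(model_list=model_list)
    except ValueError as e:
        # Pre-patch message: "api_base is required for Azure OpenAI. Set it on your config"
        # Post-patch, the message should also name the model, e.g.
        # "api_base is required for Azure OpenAI. Set it on your config. Model - azure/gpt-4"
        print(e)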