diff --git a/deploy/Dockerfile.ghcr_base b/deploy/Dockerfile.ghcr_base
new file mode 100644
index 000000000..691f08272
--- /dev/null
+++ b/deploy/Dockerfile.ghcr_base
@@ -0,0 +1,17 @@
+# Use the provided base image
+FROM ghcr.io/berriai/litellm:main-latest
+
+# Set the working directory to /app
+WORKDIR /app
+
+# Copy the configuration file into the container at /app
+COPY config.yaml .
+
+# Make sure your entrypoint.sh is executable
+RUN chmod +x entrypoint.sh
+
+# Expose the necessary port
+EXPOSE 4000/tcp
+
+# Override the CMD instruction with your desired command and arguments
+CMD ["--port", "4000", "--config", "config.yaml", "--detailed_debug", "--run_gunicorn"]
diff --git a/docker-compose.yml b/docker-compose.yml
index 36d5d3976..814677735 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -8,8 +8,6 @@ services:
     ports:
       - "4000:4000"
     environment:
      - AZURE_API_KEY=sk-123
-  litellm-ui:
-    image: ghcr.io/berriai/litellm-ui:main-latest
diff --git a/docs/my-website/docs/proxy/deploy.md b/docs/my-website/docs/proxy/deploy.md
index 81960a2a0..c3a84f1ea 100644
--- a/docs/my-website/docs/proxy/deploy.md
+++ b/docs/my-website/docs/proxy/deploy.md
@@ -7,6 +7,10 @@ You can find the Dockerfile to build litellm proxy [here](https://github.com/Ber
 
 ## Quick Start
 
+
+
+
+
 See the latest available ghcr docker image here:
 https://github.com/berriai/litellm/pkgs/container/litellm
@@ -18,6 +22,12 @@ docker pull ghcr.io/berriai/litellm:main-latest
 docker run ghcr.io/berriai/litellm:main-latest
 ```
 
+
+
+
+
+
+
 ### Run with LiteLLM CLI args
 
 See all supported CLI args [here](https://docs.litellm.ai/docs/proxy/cli):
@@ -32,6 +42,34 @@ Here's how you can run the docker image and start litellm on port 8002 with `num
 docker run ghcr.io/berriai/litellm:main-latest --port 8002 --num_workers 8
 ```
 
+
+
+
+
+```shell
+# Use the provided base image
+FROM ghcr.io/berriai/litellm:main-latest
+
+# Set the working directory to /app
+WORKDIR /app
+
+# Copy the configuration file into the container at /app
+COPY config.yaml .
+
+# Make sure your entrypoint.sh is executable
+RUN chmod +x entrypoint.sh
+
+# Expose the necessary port
+EXPOSE 4000/tcp
+
+# Override the CMD instruction with your desired command and arguments
+CMD ["--port", "4000", "--config", "config.yaml", "--detailed_debug", "--run_gunicorn"]
+```
+
+
+
+
+
 ## Deploy with Database
 
 We maintain a [separate Dockerfile](https://github.com/BerriAI/litellm/pkgs/container/litellm-database) for reducing build time when running LiteLLM proxy with a connected Postgres Database
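
For reference, a minimal sketch of how the new `deploy/Dockerfile.ghcr_base` could be built and run locally. This assumes the Dockerfile sits in a directory alongside your `config.yaml` (the `COPY config.yaml .` step requires it in the build context); the `litellm-with-config` image tag is an arbitrary example name, not part of this PR:

```shell
# Build a custom image on top of the ghcr base image
# (run from a directory holding this Dockerfile and your config.yaml)
docker build -f Dockerfile.ghcr_base -t litellm-with-config .

# Start the proxy; the image's CMD already passes --port 4000 and --config config.yaml
docker run -p 4000:4000 litellm-with-config
```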