Commit 2e2c7da63a by Zackeus Bengtsson, 2025-04-24 01:02:00 -07:00 (committed by GitHub)
2 changed files with 10 additions and 2 deletions

Changed file 1 of 2 (Dockerfile)

@@ -38,6 +38,10 @@ RUN pip install dist/*.whl
 # install dependencies as wheels
 RUN pip wheel --no-cache-dir --wheel-dir=/wheels/ -r requirements.txt
+# Download tiktoken for offline usage of image, see https://stackoverflow.com/a/76107077. Let tiktoken itself download the file, to ensure correct naming
+ENV TIKTOKEN_CACHE_DIR="/tiktoken"
+RUN mkdir /tiktoken && python -c "import tiktoken; tiktoken.get_encoding('cl100k_base'); print('tiktoken imported successfully')"
 # Runtime stage
 FROM $LITELLM_RUNTIME_IMAGE AS runtime
@@ -53,6 +57,10 @@ RUN ls -la /app
 COPY --from=builder /app/dist/*.whl .
 COPY --from=builder /wheels/ /wheels/
+# Copy tiktoken from build stage, and set env variable to stop tiktoken from downloading file
+COPY --from=builder /tiktoken /tiktoken
+ENV CUSTOM_TIKTOKEN_CACHE_DIR="/tiktoken"
 # Install the built wheel using pip; again using a wildcard if it's the only file
 RUN pip install *.whl /wheels/* --no-index --find-links=/wheels/ && rm -f *.whl && rm -rf /wheels
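For reference, the two hunks above work together: the build stage lets `tiktoken` download the `cl100k_base` BPE file into `/tiktoken` (so the cached file gets exactly the name tiktoken expects), and the runtime stage copies that directory across and exposes it via an environment variable. Below is a minimal sketch of the mechanism this relies on, assuming tiktoken honours the `TIKTOKEN_CACHE_DIR` environment variable as described in the linked Stack Overflow answer; the snippet is illustrative and not part of the commit.

```python
# Illustrative sketch: how a process inside the image can use the
# pre-populated cache without any network access.
import os
import tiktoken

# Point tiktoken at the cache directory baked into the image at build time.
# (In the image this is handled by the ENV instructions above.)
os.environ.setdefault("TIKTOKEN_CACHE_DIR", "/tiktoken")

# The BPE file for cl100k_base is already cached, so this call reads it from
# /tiktoken instead of downloading it over the network.
enc = tiktoken.get_encoding("cl100k_base")
print(len(enc.encode("hello world")))  # token count, e.g. 2
```

If the cached file were missing, `tiktoken.get_encoding` would fall back to downloading it, which is exactly the step that fails in an air-gapped container.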

Changed file 2 of 2 (deployment documentation)

@@ -605,9 +605,9 @@ ghcr.io/berriai/litellm-database:main-latest --config your_config.yaml
 ### (Non Root) - without Internet Connection
-By default `prisma generate` downloads [prisma's engine binaries](https://www.prisma.io/docs/orm/reference/environment-variables-reference#custom-engine-file-locations). This might cause errors when running without internet connection.
+By default `prisma generate` downloads [prisma's engine binaries](https://www.prisma.io/docs/orm/reference/environment-variables-reference#custom-engine-file-locations). LiteLLM also uses `tiktoken` for tracking the number of tokens in a given user input (for openai models). This might cause errors when running without internet connection.
-Use this docker image to deploy litellm with pre-generated prisma binaries.
+Use this docker image to deploy litellm with pre-generated prisma binaries, and pre-downloaded tiktoken tokenizer files.
 ```bash
 docker pull ghcr.io/berriai/litellm-non_root:main-stable