Make the "all-remote" distribution lightweight in dependencies and size

This commit is contained in:
Ashwin Bharambe 2024-09-24 14:18:57 -07:00
parent 445536de64
commit bda974e660
4 changed files with 65 additions and 18 deletions

View file

@ -8,11 +8,25 @@ from typing import List
from llama_stack.distribution.datatypes import * # noqa: F403
# Pip requirement strings needed by the embedding/memory provider. Entries may
# carry extra pip flags (e.g. --index-url, --no-deps); see the note below on
# why that works with the current install ordering.
EMBEDDING_DEPS = [
    "blobfile",
    "chardet",
    "pypdf",
    "tqdm",
    "numpy",
    "scikit-learn",
    "scipy",
    "nltk",
    "sentencepiece",
    "transformers",
    # this happens to work because special dependencies are always installed last
    # so if there was a regular torch installed first, this would be ignored
    # we need a better way to do this to identify potential conflicts, etc.
    # for now, this lets us significantly reduce the size of the container which
    # does not have any "local" inference code (and hence does not need GPU-enabled torch)
    "torch --index-url https://download.pytorch.org/whl/cpu",
    # install without deps so sentence-transformers cannot drag in the default
    # (GPU-enabled) torch wheel; its runtime deps are listed explicitly above.
    # NOTE: a bare "sentence-transformers" entry was removed here — it duplicated
    # this one and would have reinstalled the heavy torch dependency chain.
    "sentence-transformers --no-deps",
]