Mirror of https://github.com/meta-llama/llama-stack.git, synced 2025-08-06 10:42:39 +00:00
Make sure torch + torchvision go together as deps
This commit is contained in:
parent 0f14378135
commit bc862f1d25
1 changed file with 1 addition and 1 deletion
@@ -30,7 +30,7 @@ EMBEDDING_DEPS = [
     # we need a better way to do this to identify potential conflicts, etc.
     # for now, this lets us significantly reduce the size of the container which
     # does not have any "local" inference code (and hence does not need GPU-enabled torch)
-    "torch --index-url https://download.pytorch.org/whl/cpu",
+    "torch torchvision --index-url https://download.pytorch.org/whl/cpu",
     "sentence-transformers --no-deps",
 ]
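For context, the entries in EMBEDDING_DEPS are pip-style requirement strings that carry their own flags. Below is a minimal sketch, not the actual llama-stack build code, of how such strings could be expanded into pip invocations; it assumes each string is split with shlex and passed straight to `pip install`, which may not match exactly how the container build consumes them. Keeping torch and torchvision in the same string means they land in one pip invocation against the same CPU-only wheel index, so pip can resolve mutually compatible CPU builds of both.

# Sketch only (hypothetical): expand pip-style requirement strings with
# embedded flags into install commands. Putting torch and torchvision in
# one string keeps them in a single pip invocation against the CPU-only
# wheel index, so compatible CPU builds of both are selected together.
import shlex

EMBEDDING_DEPS = [
    "torch torchvision --index-url https://download.pytorch.org/whl/cpu",
    "sentence-transformers --no-deps",
]

for spec in EMBEDDING_DEPS:
    # shlex.split keeps package names and their flags
    # (--index-url, --no-deps) together in a single command.
    cmd = ["pip", "install", *shlex.split(spec)]
    print(" ".join(cmd))

Running the sketch prints one command per entry, e.g. "pip install torch torchvision --index-url https://download.pytorch.org/whl/cpu", which is the pairing the commit message refers to.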