Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-06-29 11:24:19 +00:00)
Misc fixes (#944)
- Make sure torch and torchvision go together as deps; installed separately, torchvision can pull in a mismatched, GPU-enabled torch from the default index instead of reusing the CPU-only build
- Add a pre-commit for requirements.txt
This commit is contained in:
parent 0f14378135
commit f98efe68c9
5 changed files with 73 additions and 11 deletions
@@ -30,7 +30,7 @@ EMBEDDING_DEPS = [
     # we need a better way to do this to identify potential conflicts, etc.
     # for now, this lets us significantly reduce the size of the container which
     # does not have any "local" inference code (and hence does not need GPU-enabled torch)
-    "torch --index-url https://download.pytorch.org/whl/cpu",
+    "torch torchvision --index-url https://download.pytorch.org/whl/cpu",
     "sentence-transformers --no-deps",
 ]
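To make the constraint concrete, below is a minimal, hypothetical sketch (not the actual llama-stack build code, which this diff does not show) of how a dependency line like the one above might be handed to pip. Because the whole string becomes a single install invocation, the `--index-url` flag applies to both torch and torchvision, so they resolve as a matched pair from the CPU-only index.

```python
# Hypothetical illustration only -- the real build scripts are not part of this
# diff. It shows why "torch torchvision --index-url ..." must be one entry:
# the index flag scopes to everything installed in the same pip command.
import shlex
import subprocess
import sys

EMBEDDING_DEPS = [
    # One line => torch and torchvision resolve together against the CPU-only
    # index, so torchvision cannot drag in a mismatched GPU-enabled torch.
    "torch torchvision --index-url https://download.pytorch.org/whl/cpu",
    "sentence-transformers --no-deps",
]

for dep in EMBEDDING_DEPS:
    # Each entry becomes one `pip install` invocation.
    subprocess.run(
        [sys.executable, "-m", "pip", "install", *shlex.split(dep)],
        check=True,
    )
```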