Misc fixes (#944)

- Make sure torch + torchvision go together as deps, otherwise bad stuff
happens
- Add a pre-commit for requirements.txt
This commit is contained in:
Ashwin Bharambe 2025-02-03 14:08:47 -08:00 committed by GitHub
parent 0f14378135
commit f98efe68c9
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
5 changed files with 73 additions and 11 deletions

View file

@@ -43,6 +43,12 @@ repos:
additional_dependencies:
- black==24.3.0
- repo: https://github.com/astral-sh/uv-pre-commit
rev: 0.5.26
hooks:
- id: uv-export
args: ["--frozen", "--no-hashes", "--no-emit-project"]
# - repo: https://github.com/pre-commit/mirrors-mypy
# rev: v1.14.0
# hooks:

View file

@@ -87,7 +87,10 @@
"\n",
"!apt-get install -y bubblewrap\n",
"# install a branch of llama stack\n",
"!pip install llama-stack"
"import os\n",
"os.environ[\"UV_SYSTEM_PYTHON\"] = \"1\"\n",
"!pip install uv \n",
"!uv pip install llama-stack"
]
},
{

View file

@@ -30,7 +30,7 @@ EMBEDDING_DEPS = [
# we need a better way to do this to identify potential conflicts, etc.
# for now, this lets us significantly reduce the size of the container which
# does not have any "local" inference code (and hence does not need GPU-enabled torch)
"torch --index-url https://download.pytorch.org/whl/cpu",
"torch torchvision --index-url https://download.pytorch.org/whl/cpu",
"sentence-transformers --no-deps",
]

53
requirements.txt Normal file
View file

@@ -0,0 +1,53 @@
# This file was autogenerated by uv via the following command:
# uv export --frozen --no-hashes --no-emit-project
annotated-types==0.7.0
anyio==4.8.0
blobfile==3.0.0
certifi==2025.1.31
charset-normalizer==3.4.1
click==8.1.8
colorama==0.4.6 ; sys_platform == 'win32'
distro==1.9.0
exceptiongroup==1.2.2 ; python_full_version < '3.11'
filelock==3.17.0
fire==0.7.0
fsspec==2024.12.0
h11==0.14.0
httpcore==1.0.7
httpx==0.28.1
huggingface-hub==0.28.1
idna==3.10
jinja2==3.1.5
llama-models==0.1.1
llama-stack-client==0.1.1
lxml==5.3.0
markdown-it-py==3.0.0
markupsafe==3.0.2
mdurl==0.1.2
numpy==2.2.2
packaging==24.2
pandas==2.2.3
pillow==11.1.0
prompt-toolkit==3.0.50
pyaml==25.1.0
pycryptodomex==3.21.0
pydantic==2.10.6
pydantic-core==2.27.2
pygments==2.19.1
python-dateutil==2.9.0.post0
python-dotenv==1.0.1
pytz==2025.1
pyyaml==6.0.2
regex==2024.11.6
requests==2.32.3
rich==13.9.4
setuptools==75.8.0
six==1.17.0
sniffio==1.3.1
termcolor==2.5.0
tiktoken==0.8.0
tqdm==4.67.1
typing-extensions==4.12.2
tzdata==2025.1
urllib3==2.3.0
wcwidth==0.2.13

18
uv.lock generated
View file

@@ -624,7 +624,7 @@ wheels = [
[[package]]
name = "llama-models"
version = "0.1.0"
version = "0.1.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "jinja2" },
@@ -633,14 +633,14 @@ dependencies = [
{ name = "pyyaml" },
{ name = "tiktoken" },
]
sdist = { url = "https://files.pythonhosted.org/packages/70/13/ae43e78582d2f607bdbf6fed109fec81f33b7e00d121519b39e243fe4b00/llama_models-0.1.0.tar.gz", hash = "sha256:064f2e4659794eefb67c0c839b06befd7bf87b5470acc50e163128e1fabb4332", size = 1551395 }
sdist = { url = "https://files.pythonhosted.org/packages/df/80/4a4595cf5e55f71c0e15b85ff2f4c04b0742bf664ede062a09c9d383bf7b/llama_models-0.1.1.tar.gz", hash = "sha256:7cb5a9fe38485b47aff4c93e183d6d390a676a7619f3355502576b652f17733a", size = 1608412 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/64/a1/63abc002f73098d6129847e0bd32e93bc84890a1beaa3e86263dd80b29b8/llama_models-0.1.0-py3-none-any.whl", hash = "sha256:be191cf7ac614374d48a8d63aa89aec8b86ab3b2ff6bce456d2d353f33e5bd80", size = 1573410 },
{ url = "https://files.pythonhosted.org/packages/d9/93/d49dd0f0cd37df1a7a7fb25444d010f626cdf42b21eea11d839b0f6a808a/llama_models-0.1.1-py3-none-any.whl", hash = "sha256:7e4f15dc4f6f011852ea2c42f9770b75140f5eca670b32cc67fc0a4605c55f89", size = 1650981 },
]
[[package]]
name = "llama-stack"
version = "0.1.0"
version = "0.1.1"
source = { editable = "." }
dependencies = [
{ name = "blobfile" },
@@ -676,8 +676,8 @@ requires-dist = [
{ name = "fire" },
{ name = "httpx" },
{ name = "huggingface-hub" },
{ name = "llama-models", specifier = ">=0.1.0" },
{ name = "llama-stack-client", specifier = ">=0.1.0" },
{ name = "llama-models", specifier = ">=0.1.1" },
{ name = "llama-stack-client", specifier = ">=0.1.1" },
{ name = "nbval", marker = "extra == 'dev'" },
{ name = "prompt-toolkit" },
{ name = "pydantic", specifier = ">=2" },
@@ -695,7 +695,7 @@ requires-dist = [
[[package]]
name = "llama-stack-client"
version = "0.1.0"
version = "0.1.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "anyio" },
@@ -712,9 +712,9 @@ dependencies = [
{ name = "tqdm" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/2e/bc/44b6d697f3418c8f6dc255167bee9928c045ac7ba7a25af85bffac45a386/llama_stack_client-0.1.0.tar.gz", hash = "sha256:a8092626b915ee03faa8ff7ec6678eb448594a8bfb45713b92e3a0cf5cf26c61", size = 180816 }
sdist = { url = "https://files.pythonhosted.org/packages/07/42/7004958ac1a6da9a8060decf0d9120fdeb3b2775de090a0a473f2ee4a27d/llama_stack_client-0.1.1.tar.gz", hash = "sha256:3e549a848ade959d342fa52ec49b1913b7bb615a77b5b8dcaefe6ff94409049e", size = 179729 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/5d/4e/041a24d1e7d020db8262d54399b6635d43fa7fdb8500c88bc90dfbc22eac/llama_stack_client-0.1.0-py3-none-any.whl", hash = "sha256:a2be50afd33246429da50d2ea7e77f00c007c2a380e2ce0ebb5280501bd4b9e0", size = 348019 },
{ url = "https://files.pythonhosted.org/packages/80/66/5255c09dc001ff437fd6fe6fad27142035b60073df243f7df0494095f605/llama_stack_client-0.1.1-py3-none-any.whl", hash = "sha256:e07d58fdcc1eaa370dd00b94c2dd1a8169c0ac60c37f6f2772cbc2c5b63f2e62", size = 348665 },
]
[[package]]