make uv-export actually work?

Ashwin Bharambe 2025-02-14 08:53:56 -08:00
parent 74190ee310
commit a9890559b7
2 changed files with 8 additions and 110 deletions

.pre-commit-config.yaml

@@ -43,15 +43,14 @@ repos:
     rev: 0.5.26
     hooks:
       - id: uv-export
-        args: ["--frozen", "--no-hashes", "--no-emit-project"]
-      - id: uv-sync
-      - id: pip-compile
         args: [
-          "pyproject.toml",
-          "--output-file",
-          "requirements.txt"
+          "--frozen",
+          "--no-hashes",
+          "--no-emit-project",
+          "--output-file=requirements.txt"
         ]
         files: ^pyproject\.toml$
+      - id: uv-sync
 # - repo: https://github.com/pre-commit/mirrors-mypy
 #   rev: v1.14.0
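
The reconfigured hook now writes the export straight to requirements.txt instead of going through pip-compile. A minimal sketch of reproducing it by hand, assuming uv and pre-commit are installed:

    # regenerate requirements.txt from uv.lock, exactly as the hook is configured
    uv export --frozen --no-hashes --no-emit-project --output-file=requirements.txt
    # or run the hook itself
    pre-commit run uv-export --all-files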

requirements.txt

@@ -1,159 +1,58 @@
 # This file was autogenerated by uv via the following command:
-#    uv pip compile pyproject.toml --output-file requirements.txt
+#    uv export --frozen --no-hashes --no-emit-project
 annotated-types==0.7.0
-    # via pydantic
 anyio==4.8.0
-    # via
-    #   httpx
-    #   llama-stack-client
 attrs==25.1.0
-    # via
-    #   jsonschema
-    #   referencing
 blobfile==3.0.0
-    # via llama-stack (pyproject.toml)
 certifi==2025.1.31
-    # via
-    #   httpcore
-    #   httpx
-    #   requests
 charset-normalizer==3.4.1
-    # via requests
 click==8.1.8
-    # via llama-stack-client
+colorama==0.4.6 ; sys_platform == 'win32'
 distro==1.9.0
-    # via llama-stack-client
-exceptiongroup==1.2.2
-    # via anyio
+exceptiongroup==1.2.2 ; python_full_version < '3.11'
 filelock==3.17.0
-    # via
-    #   blobfile
-    #   huggingface-hub
 fire==0.7.0
-    # via llama-stack (pyproject.toml)
 fsspec==2024.12.0
-    # via huggingface-hub
 h11==0.14.0
-    # via httpcore
 httpcore==1.0.7
-    # via httpx
 httpx==0.28.1
-    # via
-    #   llama-stack (pyproject.toml)
-    #   llama-stack-client
 huggingface-hub==0.28.1
-    # via llama-stack (pyproject.toml)
 idna==3.10
-    # via
-    #   anyio
-    #   httpx
-    #   requests
 jinja2==3.1.5
-    # via llama-models
 jsonschema==4.23.0
-    # via llama-stack (pyproject.toml)
 jsonschema-specifications==2024.10.1
-    # via jsonschema
 llama-models==0.1.2
-    # via llama-stack (pyproject.toml)
 llama-stack-client==0.1.2
-    # via llama-stack (pyproject.toml)
 lxml==5.3.0
-    # via blobfile
 markdown-it-py==3.0.0
-    # via rich
 markupsafe==3.0.2
-    # via jinja2
 mdurl==0.1.2
-    # via markdown-it-py
 numpy==2.2.2
-    # via pandas
 packaging==24.2
-    # via huggingface-hub
 pandas==2.2.3
-    # via llama-stack-client
 pillow==11.1.0
-    # via llama-models
 prompt-toolkit==3.0.50
-    # via
-    #   llama-stack (pyproject.toml)
-    #   llama-stack-client
 pyaml==25.1.0
-    # via llama-stack-client
 pycryptodomex==3.21.0
-    # via blobfile
 pydantic==2.10.6
-    # via
-    #   llama-stack (pyproject.toml)
-    #   llama-models
-    #   llama-stack-client
 pydantic-core==2.27.2
-    # via pydantic
 pygments==2.19.1
-    # via rich
 python-dateutil==2.9.0.post0
-    # via pandas
 python-dotenv==1.0.1
-    # via llama-stack (pyproject.toml)
 pytz==2025.1
-    # via pandas
 pyyaml==6.0.2
-    # via
-    #   huggingface-hub
-    #   llama-models
-    #   pyaml
 referencing==0.36.2
-    # via
-    #   jsonschema
-    #   jsonschema-specifications
 regex==2024.11.6
-    # via tiktoken
 requests==2.32.3
-    # via
-    #   llama-stack (pyproject.toml)
-    #   huggingface-hub
-    #   tiktoken
 rich==13.9.4
-    # via
-    #   llama-stack (pyproject.toml)
-    #   llama-stack-client
 rpds-py==0.22.3
-    # via
-    #   jsonschema
-    #   referencing
 setuptools==75.8.0
-    # via llama-stack (pyproject.toml)
 six==1.17.0
-    # via python-dateutil
 sniffio==1.3.1
-    # via
-    #   anyio
-    #   llama-stack-client
 termcolor==2.5.0
-    # via
-    #   llama-stack (pyproject.toml)
-    #   fire
-    #   llama-stack-client
 tiktoken==0.8.0
-    # via llama-models
 tqdm==4.67.1
-    # via
-    #   huggingface-hub
-    #   llama-stack-client
 typing-extensions==4.12.2
-    # via
-    #   anyio
-    #   huggingface-hub
-    #   llama-stack-client
-    #   pydantic
-    #   pydantic-core
-    #   referencing
-    #   rich
 tzdata==2025.1
-    # via pandas
 urllib3==2.3.0
-    # via
-    #   blobfile
-    #   requests
 wcwidth==0.2.13
-    # via prompt-toolkit
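
The regenerated file drops the "# via" annotations that uv pip compile emits, and platform-conditional dependencies (colorama on Windows, exceptiongroup below Python 3.11) now carry environment markers, because uv export derives its output from the cross-platform uv.lock rather than resolving for the current interpreter. A sketch for checking the committed file is still in sync, using the same flags as the hook (uv export writes to stdout when no output file is given):

    # non-zero exit and a diff listing if requirements.txt is stale
    uv export --frozen --no-hashes --no-emit-project | diff - requirements.txt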