diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 371910108..3595accf5 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -43,15 +43,14 @@ repos:
     rev: 0.5.26
     hooks:
       - id: uv-export
-        args: ["--frozen", "--no-hashes", "--no-emit-project"]
-      - id: uv-sync
-      - id: pip-compile
         args: [
-          "pyproject.toml",
-          "--output-file",
-          "requirements.txt"
+          "--frozen",
+          "--no-hashes",
+          "--no-emit-project",
+          "--output-file=requirements.txt"
         ]
         files: ^pyproject\.toml$
+      - id: uv-sync
 
 # - repo: https://github.com/pre-commit/mirrors-mypy
 #   rev: v1.14.0
diff --git a/requirements.txt b/requirements.txt
index b6afe99c9..bb14b1eff 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,159 +1,58 @@
 # This file was autogenerated by uv via the following command:
-#    uv pip compile pyproject.toml --output-file requirements.txt
+#    uv export --frozen --no-hashes --no-emit-project
 annotated-types==0.7.0
-    # via pydantic
 anyio==4.8.0
-    # via
-    #   httpx
-    #   llama-stack-client
 attrs==25.1.0
-    # via
-    #   jsonschema
-    #   referencing
 blobfile==3.0.0
-    # via llama-stack (pyproject.toml)
 certifi==2025.1.31
-    # via
-    #   httpcore
-    #   httpx
-    #   requests
 charset-normalizer==3.4.1
-    # via requests
 click==8.1.8
-    # via llama-stack-client
+colorama==0.4.6 ; sys_platform == 'win32'
 distro==1.9.0
-    # via llama-stack-client
-exceptiongroup==1.2.2
-    # via anyio
+exceptiongroup==1.2.2 ; python_full_version < '3.11'
 filelock==3.17.0
-    # via
-    #   blobfile
-    #   huggingface-hub
 fire==0.7.0
-    # via llama-stack (pyproject.toml)
 fsspec==2024.12.0
-    # via huggingface-hub
 h11==0.14.0
-    # via httpcore
 httpcore==1.0.7
-    # via httpx
 httpx==0.28.1
-    # via
-    #   llama-stack (pyproject.toml)
-    #   llama-stack-client
 huggingface-hub==0.28.1
-    # via llama-stack (pyproject.toml)
 idna==3.10
-    # via
-    #   anyio
-    #   httpx
-    #   requests
 jinja2==3.1.5
-    # via llama-models
 jsonschema==4.23.0
-    # via llama-stack (pyproject.toml)
 jsonschema-specifications==2024.10.1
-    # via jsonschema
 llama-models==0.1.2
-    # via llama-stack (pyproject.toml)
 llama-stack-client==0.1.2
-    # via llama-stack (pyproject.toml)
 lxml==5.3.0
-    # via blobfile
 markdown-it-py==3.0.0
-    # via rich
 markupsafe==3.0.2
-    # via jinja2
 mdurl==0.1.2
-    # via markdown-it-py
 numpy==2.2.2
-    # via pandas
 packaging==24.2
-    # via huggingface-hub
 pandas==2.2.3
-    # via llama-stack-client
 pillow==11.1.0
-    # via llama-models
 prompt-toolkit==3.0.50
-    # via
-    #   llama-stack (pyproject.toml)
-    #   llama-stack-client
 pyaml==25.1.0
-    # via llama-stack-client
 pycryptodomex==3.21.0
-    # via blobfile
 pydantic==2.10.6
-    # via
-    #   llama-stack (pyproject.toml)
-    #   llama-models
-    #   llama-stack-client
 pydantic-core==2.27.2
-    # via pydantic
 pygments==2.19.1
-    # via rich
 python-dateutil==2.9.0.post0
-    # via pandas
 python-dotenv==1.0.1
-    # via llama-stack (pyproject.toml)
 pytz==2025.1
-    # via pandas
 pyyaml==6.0.2
-    # via
-    #   huggingface-hub
-    #   llama-models
-    #   pyaml
 referencing==0.36.2
-    # via
-    #   jsonschema
-    #   jsonschema-specifications
 regex==2024.11.6
-    # via tiktoken
 requests==2.32.3
-    # via
-    #   llama-stack (pyproject.toml)
-    #   huggingface-hub
-    #   tiktoken
 rich==13.9.4
-    # via
-    #   llama-stack (pyproject.toml)
-    #   llama-stack-client
 rpds-py==0.22.3
-    # via
-    #   jsonschema
-    #   referencing
 setuptools==75.8.0
-    # via llama-stack (pyproject.toml)
 six==1.17.0
-    # via python-dateutil
 sniffio==1.3.1
-    # via
-    #   anyio
-    #   llama-stack-client
 termcolor==2.5.0
-    # via
-    #   llama-stack (pyproject.toml)
-    #   fire
-    #   llama-stack-client
 tiktoken==0.8.0
-    # via llama-models
 tqdm==4.67.1
-    # via
-    #   huggingface-hub
-    #   llama-stack-client
 typing-extensions==4.12.2
-    # via
-    #   anyio
-    #   huggingface-hub
-    #   llama-stack-client
-    #   pydantic
-    #   pydantic-core
-    #   referencing
-    #   rich
 tzdata==2025.1
-    # via pandas
 urllib3==2.3.0
-    # via
-    #   blobfile
-    #   requests
 wcwidth==0.2.13
-    # via prompt-toolkit
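
For reference, the updated uv-export hook regenerates requirements.txt the same way as invoking uv by hand. A minimal sketch, assuming uv >= 0.5.26 and pre-commit are installed in the working environment:

    # regenerate requirements.txt from the lockfile, exactly as the hook does
    uv export --frozen --no-hashes --no-emit-project --output-file=requirements.txt

    # or trigger the configured hooks directly
    pre-commit run uv-export --all-files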