# This file was autogenerated by uv via the following command:
#    uv export --frozen --no-hashes --no-emit-project --no-default-groups --output-file=requirements.txt
aiohappyeyeballs==2.5.0
    # via aiohttp
aiohttp==3.11.13
    # via llama-stack
aiosignal==1.3.2
    # via aiohttp
aiosqlite==0.21.0
    # via llama-stack
annotated-types==0.7.0
    # via pydantic
anyio==4.8.0
    # via
    #   httpx
    #   llama-stack-client
    #   openai
    #   starlette
asyncpg==0.30.0
    # via llama-stack
attrs==25.1.0
    # via
    #   aiohttp
    #   jsonschema
    #   referencing
certifi==2025.1.31
    # via
    #   httpcore
    #   httpx
    #   requests
charset-normalizer==3.4.1
    # via requests
click==8.1.8
    # via
    #   llama-stack-client
    #   uvicorn
colorama==0.4.6 ; sys_platform == 'win32'
    # via
    #   click
    #   tqdm
deprecated==1.2.18
    # via
    #   opentelemetry-api
    #   opentelemetry-exporter-otlp-proto-http
    #   opentelemetry-semantic-conventions
distro==1.9.0
    # via
    #   llama-stack-client
    #   openai
ecdsa==0.19.1
    # via python-jose
fastapi==0.115.8
    # via llama-stack
filelock==3.17.0
    # via huggingface-hub
fire==0.7.0
    # via llama-stack
frozenlist==1.5.0
    # via
    #   aiohttp
    #   aiosignal
fsspec==2024.12.0
    # via huggingface-hub
googleapis-common-protos==1.67.0
    # via opentelemetry-exporter-otlp-proto-http
h11==0.16.0
    # via
    #   httpcore
    #   llama-stack
    #   uvicorn
hf-xet==1.1.5 ; platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'arm64' or platform_machine == 'x86_64'
    # via huggingface-hub
httpcore==1.0.9
    # via httpx
httpx==0.28.1
    # via
    #   llama-stack
    #   llama-stack-client
    #   openai
huggingface-hub==0.33.0
    # via llama-stack
idna==3.10
    # via
    #   anyio
    #   httpx
    #   requests
    #   yarl
importlib-metadata==8.5.0
    # via opentelemetry-api
jinja2==3.1.6
    # via llama-stack
jiter==0.8.2
    # via openai
jsonschema==4.23.0
    # via llama-stack
jsonschema-specifications==2024.10.1
    # via jsonschema
llama-stack-client==0.2.12
    # via llama-stack
markdown-it-py==3.0.0
    # via rich
markupsafe==3.0.2
    # via jinja2
mdurl==0.1.2
    # via markdown-it-py
multidict==6.1.0
    # via
    #   aiohttp
    #   yarl
numpy==2.2.3
    # via pandas
openai==1.71.0
    # via llama-stack
opentelemetry-api==1.30.0
    # via
    #   opentelemetry-exporter-otlp-proto-http
    #   opentelemetry-sdk
    #   opentelemetry-semantic-conventions
opentelemetry-exporter-otlp-proto-common==1.30.0
    # via opentelemetry-exporter-otlp-proto-http
opentelemetry-exporter-otlp-proto-http==1.30.0
    # via llama-stack
opentelemetry-proto==1.30.0
    # via
    #   opentelemetry-exporter-otlp-proto-common
    #   opentelemetry-exporter-otlp-proto-http
opentelemetry-sdk==1.30.0
    # via
    #   llama-stack
    #   opentelemetry-exporter-otlp-proto-http
opentelemetry-semantic-conventions==0.51b0
    # via opentelemetry-sdk
packaging==24.2
    # via huggingface-hub
pandas==2.2.3
    # via llama-stack-client
pillow==11.1.0
    # via llama-stack
prompt-toolkit==3.0.50
    # via
    #   llama-stack
    #   llama-stack-client
propcache==0.3.0
    # via
    #   aiohttp
    #   yarl
protobuf==5.29.3
    # via
    #   googleapis-common-protos
    #   opentelemetry-proto
pyaml==25.1.0
    # via llama-stack-client
pyasn1==0.4.8
    # via
    #   python-jose
    #   rsa
pydantic==2.10.6
    # via
    #   fastapi
    #   llama-stack
    #   llama-stack-client
    #   openai
pydantic-core==2.27.2
    # via pydantic
pygments==2.19.1
    # via rich
python-dateutil==2.9.0.post0
    # via pandas
python-dotenv==1.0.1
    # via llama-stack
python-jose==3.4.0
    # via llama-stack
python-multipart==0.0.20
    # via llama-stack
pytz==2025.1
    # via pandas
pyyaml==6.0.2
    # via
    #   huggingface-hub
    #   pyaml
referencing==0.36.2
    # via
    #   jsonschema
    #   jsonschema-specifications
regex==2024.11.6
    # via tiktoken
requests==2.32.4
    # via
    #   huggingface-hub
    #   opentelemetry-exporter-otlp-proto-http
    #   tiktoken
rich==13.9.4
    # via
    #   llama-stack
    #   llama-stack-client
rpds-py==0.22.3
    # via
    #   jsonschema
    #   referencing
rsa==4.9
    # via python-jose
six==1.17.0
    # via
    #   ecdsa
    #   python-dateutil
sniffio==1.3.1
    # via
    #   anyio
    #   llama-stack-client
    #   openai
starlette==0.45.3
    # via
    #   fastapi
    #   llama-stack
termcolor==2.5.0
    # via
    #   fire
    #   llama-stack
    #   llama-stack-client
tiktoken==0.9.0
    # via llama-stack
tqdm==4.67.1
    # via
    #   huggingface-hub
    #   llama-stack-client
    #   openai
typing-extensions==4.12.2
    # via
    #   aiosqlite
    #   anyio
    #   fastapi
    #   huggingface-hub
    #   llama-stack-client
    #   openai
    #   opentelemetry-sdk
    #   pydantic
    #   pydantic-core
    #   referencing
tzdata==2025.1
    # via pandas
urllib3==2.3.0
    # via requests
uvicorn==0.34.0
    # via llama-stack
wcwidth==0.2.13
    # via prompt-toolkit
wrapt==1.17.2
    # via deprecated
yarl==1.18.3
    # via aiohttp
zipp==3.21.0
    # via importlib-metadata