# This file was autogenerated by uv via the following command:
#    uv export --frozen --no-hashes --no-emit-project --no-default-groups --output-file=requirements.txt
annotated-types==0.7.0
    # via pydantic
anyio==4.8.0
    # via
    #   httpx
    #   llama-stack-client
    #   openai
attrs==25.1.0
    # via
    #   jsonschema
    #   referencing
certifi==2025.1.31
    # via
    #   httpcore
    #   httpx
    #   requests
charset-normalizer==3.4.1
    # via requests
click==8.1.8
    # via llama-stack-client
colorama==0.4.6 ; sys_platform == 'win32'
    # via
    #   click
    #   tqdm
distro==1.9.0
    # via
    #   llama-stack-client
    #   openai
ecdsa==0.19.1
    # via python-jose
exceptiongroup==1.2.2 ; python_full_version < '3.11'
    # via anyio
filelock==3.17.0
    # via huggingface-hub
fire==0.7.0
    # via llama-stack
fsspec==2024.12.0
    # via huggingface-hub
h11==0.16.0
    # via
    #   httpcore
    #   llama-stack
httpcore==1.0.9
    # via httpx
httpx==0.28.1
    # via
    #   llama-stack
    #   llama-stack-client
    #   openai
huggingface-hub==0.29.0
    # via llama-stack
idna==3.10
    # via
    #   anyio
    #   httpx
    #   requests
jinja2==3.1.6
    # via llama-stack
jiter==0.8.2
    # via openai
jsonschema==4.23.0
    # via llama-stack
jsonschema-specifications==2024.10.1
    # via jsonschema
llama-stack-client==0.2.8
    # via llama-stack
markdown-it-py==3.0.0
    # via rich
markupsafe==3.0.2
    # via jinja2
mdurl==0.1.2
    # via markdown-it-py
numpy==2.2.3
    # via pandas
openai==1.71.0
    # via llama-stack
packaging==24.2
    # via huggingface-hub
pandas==2.2.3
    # via llama-stack-client
pillow==11.1.0
    # via llama-stack
prompt-toolkit==3.0.50
    # via
    #   llama-stack
    #   llama-stack-client
pyaml==25.1.0
    # via llama-stack-client
pyasn1==0.4.8
    # via
    #   python-jose
    #   rsa
pydantic==2.10.6
    # via
    #   llama-stack
    #   llama-stack-client
    #   openai
pydantic-core==2.27.2
    # via pydantic
pygments==2.19.1
    # via rich
python-dateutil==2.9.0.post0
    # via pandas
python-dotenv==1.0.1
    # via llama-stack
python-jose==3.4.0
    # via llama-stack
pytz==2025.1
    # via pandas
pyyaml==6.0.2
    # via
    #   huggingface-hub
    #   pyaml
referencing==0.36.2
    # via
    #   jsonschema
    #   jsonschema-specifications
regex==2024.11.6
    # via tiktoken
requests==2.32.3
    # via
    #   huggingface-hub
    #   llama-stack
    #   tiktoken
rich==13.9.4
    # via
    #   llama-stack
    #   llama-stack-client
rpds-py==0.22.3
    # via
    #   jsonschema
    #   referencing
rsa==4.9
    # via python-jose
setuptools==80.8.0
    # via llama-stack
six==1.17.0
    # via
    #   ecdsa
    #   python-dateutil
sniffio==1.3.1
    # via
    #   anyio
    #   llama-stack-client
    #   openai
termcolor==2.5.0
    # via
    #   fire
    #   llama-stack
    #   llama-stack-client
tiktoken==0.9.0
    # via llama-stack
tqdm==4.67.1
    # via
    #   huggingface-hub
    #   llama-stack-client
    #   openai
typing-extensions==4.12.2
    # via
    #   anyio
    #   huggingface-hub
    #   llama-stack-client
    #   openai
    #   pydantic
    #   pydantic-core
    #   referencing
    #   rich
tzdata==2025.1
    # via pandas
urllib3==2.3.0
    # via requests
wcwidth==0.2.13
    # via prompt-toolkit