fix: update pyproject to include recursive LS deps (#2404)

Trying to run the `llama` CLI after installing the wheel fails with this error:
```
Traceback (most recent call last):
  File "/tmp/tmp.wdZath9U6j/.venv/bin/llama", line 4, in <module>
    from llama_stack.cli.llama import main
  File "/tmp/tmp.wdZath9U6j/.venv/lib/python3.10/site-packages/llama_stack/__init__.py", line 7, in <module>
    from llama_stack.distribution.library_client import (  # noqa: F401
ModuleNotFoundError: No module named 'llama_stack.distribution.library_client'
``` 

This PR fixes it by ensuring that all sub-packages of `llama_stack` are also included in the built wheel: `include = ["llama_stack"]` only matches the top-level package, while the `"llama_stack.*"` glob also picks up nested sub-packages such as `llama_stack.distribution`.

It also fixes the missing `fastapi` dependency by moving it from the `dev` extra into the runtime dependencies.
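
A quick way to verify the packaging fix locally (a sketch; it assumes the `build` package is installed, the commands run from the repo root, and the throwaway venv path is arbitrary):

```sh
# Build a wheel from the current checkout
python -m build --wheel

# Install it into a throwaway virtualenv
python -m venv /tmp/llama-test-venv
/tmp/llama-test-venv/bin/pip install dist/llama_stack-*.whl

# Confirm the sub-packages made it into the wheel and the CLI imports cleanly
/tmp/llama-test-venv/bin/python -c "import llama_stack.distribution.library_client"
/tmp/llama-test-venv/bin/llama --help
```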
The relevant changes in `pyproject.toml`:

```diff
@@ -22,6 +22,7 @@ classifiers = [
 ]
 dependencies = [
     "aiohttp",
+    "fastapi",
     "fire",
     "httpx",
     "huggingface-hub",
@@ -67,7 +68,6 @@ dev = [
     "types-setuptools",
     "pre-commit",
     "uvicorn",
-    "fastapi",
     "ruamel.yaml", # needed for openapi generator
 ]
 # These are the dependencies required for running unit tests.
@@ -132,7 +132,8 @@ llama = "llama_stack.cli.llama:main"
 install-wheel-from-presigned = "llama_stack.cli.scripts.run:install_wheel_from_presigned"
 
 [tool.setuptools.packages.find]
-include = ["llama_stack"]
+where = ["."]
+include = ["llama_stack", "llama_stack.*"]
 
 [[tool.uv.index]]
 name = "pytorch-cpu"
```