fix: update pyproject to include recursive LS deps (#2404)

Trying to run the `llama` CLI after installing the wheel fails with this error:
```
Traceback (most recent call last):
  File "/tmp/tmp.wdZath9U6j/.venv/bin/llama", line 4, in <module>
    from llama_stack.cli.llama import main
  File "/tmp/tmp.wdZath9U6j/.venv/lib/python3.10/site-packages/llama_stack/__init__.py", line 7, in <module>
    from llama_stack.distribution.library_client import (  # noqa: F401
ModuleNotFoundError: No module named 'llama_stack.distribution.library_client'
``` 

This PR fixes it by ensuring that all sub-directories of `llama_stack`
are also included in the wheel.
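
For context, `include` patterns under `[tool.setuptools.packages.find]` are glob-matched against package names, so `"llama_stack"` alone matches only the top-level package and none of its subpackages (such as `llama_stack.distribution`). A minimal sketch of the corrected configuration, mirroring the pyproject diff below:

```toml
[tool.setuptools.packages.find]
where = ["."]                               # search from the project root
include = ["llama_stack", "llama_stack.*"]  # the glob pulls in all subpackages
```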

It also fixes the missing `fastapi` dependency issue.
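
A minimal sanity check after installing the rebuilt wheel (a sketch; run inside a fresh virtualenv — these are exactly the imports that failed before this fix):

```python
# Before this fix: the first import failed because subpackages were not
# packaged into the wheel, and fastapi was only declared as a dev dependency.
import llama_stack.distribution.library_client  # noqa: F401
import fastapi  # noqa: F401

from llama_stack.cli.llama import main  # the CLI entry point from the traceback
```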
Commit 04592b9590 (parent 4fb228a1d8)
Hardik Shah, 2025-06-05 11:46:48 -07:00, committed by GitHub
3 changed files with 2317 additions and 2308 deletions

pyproject.toml:

```diff
@@ -22,6 +22,7 @@ classifiers = [
 ]
 dependencies = [
     "aiohttp",
+    "fastapi",
     "fire",
     "httpx",
     "huggingface-hub",
@@ -67,7 +68,6 @@ dev = [
     "types-setuptools",
     "pre-commit",
     "uvicorn",
-    "fastapi",
     "ruamel.yaml", # needed for openapi generator
 ]
 # These are the dependencies required for running unit tests.
@@ -132,7 +132,8 @@ llama = "llama_stack.cli.llama:main"
 install-wheel-from-presigned = "llama_stack.cli.scripts.run:install_wheel_from_presigned"
 
 [tool.setuptools.packages.find]
-include = ["llama_stack"]
+where = ["."]
+include = ["llama_stack", "llama_stack.*"]
 
 [[tool.uv.index]]
 name = "pytorch-cpu"
```

requirements.txt (generated by uv):

```diff
@@ -42,6 +42,8 @@ ecdsa==0.19.1
     # via python-jose
 exceptiongroup==1.2.2 ; python_full_version < '3.11'
     # via anyio
+fastapi==0.115.8
+    # via llama-stack
 filelock==3.17.0
     # via huggingface-hub
 fire==0.7.0
@@ -117,6 +119,7 @@ pyasn1==0.4.8
     #   rsa
 pydantic==2.10.6
     # via
+    #   fastapi
     #   llama-stack
     #   llama-stack-client
     #   openai
@@ -171,7 +174,9 @@ sniffio==1.3.1
     #   llama-stack-client
     #   openai
 starlette==0.45.3
-    # via llama-stack
+    # via
+    #   fastapi
+    #   llama-stack
 termcolor==2.5.0
     # via
     #   fire
@@ -187,6 +192,7 @@ tqdm==4.67.1
 typing-extensions==4.12.2
     # via
     #   anyio
+    #   fastapi
     #   huggingface-hub
     #   llama-stack-client
     #   multidict
```

uv.lock (generated, 4612 changed lines): diff suppressed because it is too large.