mirror of
https://github.com/meta-llama/llama-stack.git
synced 2025-12-03 18:00:36 +00:00
fix: update pyproject to include recursive LS deps (#2404)
trying to run `llama` cli after installing wheel fails with this error
```
Traceback (most recent call last):
File "/tmp/tmp.wdZath9U6j/.venv/bin/llama", line 4, in <module>
from llama_stack.cli.llama import main
File "/tmp/tmp.wdZath9U6j/.venv/lib/python3.10/site-packages/llama_stack/__init__.py", line 7, in <module>
from llama_stack.distribution.library_client import ( # noqa: F401
ModuleNotFoundError: No module named 'llama_stack.distribution.library_client'
```
This PR fixes it by ensuring that all sub-directories of `llama_stack`
are also included.
Also, fixes the missing `fastapi` dependency issue.
This commit is contained in:
parent
4fb228a1d8
commit
04592b9590
3 changed files with 2317 additions and 2308 deletions
|
|
@@ -22,6 +22,7 @@ classifiers = [
|
|||
]
|
||||
dependencies = [
|
||||
"aiohttp",
|
||||
"fastapi",
|
||||
"fire",
|
||||
"httpx",
|
||||
"huggingface-hub",
|
||||
|
|
@@ -67,7 +68,6 @@ dev = [
|
|||
"types-setuptools",
|
||||
"pre-commit",
|
||||
"uvicorn",
|
||||
"fastapi",
|
||||
"ruamel.yaml", # needed for openapi generator
|
||||
]
|
||||
# These are the dependencies required for running unit tests.
|
||||
|
|
@@ -132,7 +132,8 @@ llama = "llama_stack.cli.llama:main"
|
|||
install-wheel-from-presigned = "llama_stack.cli.scripts.run:install_wheel_from_presigned"
|
||||
|
||||
[tool.setuptools.packages.find]
|
||||
include = ["llama_stack"]
|
||||
where = ["."]
|
||||
include = ["llama_stack", "llama_stack.*"]
|
||||
|
||||
[[tool.uv.index]]
|
||||
name = "pytorch-cpu"
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue