mirror of
https://github.com/meta-llama/llama-stack.git
synced 2025-06-27 18:50:41 +00:00
fix: update pyproject to include recursive LS deps (#2404)
trying to run `llama` cli after installing wheel fails with this error ``` Traceback (most recent call last): File "/tmp/tmp.wdZath9U6j/.venv/bin/llama", line 4, in <module> from llama_stack.cli.llama import main File "/tmp/tmp.wdZath9U6j/.venv/lib/python3.10/site-packages/llama_stack/__init__.py", line 7, in <module> from llama_stack.distribution.library_client import ( # noqa: F401 ModuleNotFoundError: No module named 'llama_stack.distribution.library_client' ``` This PR fixes it by ensuring that all sub-directories of `llama_stack` are also included. Also, fixes the missing `fastapi` dependency issue.
This commit is contained in:
parent
4fb228a1d8
commit
04592b9590
3 changed files with 2317 additions and 2308 deletions
|
@@ -22,6 +22,7 @@ classifiers = [
 ]
 dependencies = [
     "aiohttp",
+    "fastapi",
     "fire",
     "httpx",
     "huggingface-hub",
@@ -67,7 +68,6 @@ dev = [
     "types-setuptools",
     "pre-commit",
     "uvicorn",
-    "fastapi",
     "ruamel.yaml", # needed for openapi generator
 ]
 # These are the dependencies required for running unit tests.
@@ -132,7 +132,8 @@ llama = "llama_stack.cli.llama:main"
 install-wheel-from-presigned = "llama_stack.cli.scripts.run:install_wheel_from_presigned"

 [tool.setuptools.packages.find]
-include = ["llama_stack"]
 where = ["."]
+include = ["llama_stack", "llama_stack.*"]
+
 [[tool.uv.index]]
 name = "pytorch-cpu"
|
|
@@ -42,6 +42,8 @@ ecdsa==0.19.1
     # via python-jose
 exceptiongroup==1.2.2 ; python_full_version < '3.11'
     # via anyio
+fastapi==0.115.8
+    # via llama-stack
 filelock==3.17.0
     # via huggingface-hub
 fire==0.7.0
@@ -117,6 +119,7 @@ pyasn1==0.4.8
     #   rsa
 pydantic==2.10.6
     # via
+    #   fastapi
     #   llama-stack
     #   llama-stack-client
     #   openai
@@ -171,7 +174,9 @@ sniffio==1.3.1
     #   llama-stack-client
     #   openai
 starlette==0.45.3
-    # via llama-stack
+    # via
+    #   fastapi
+    #   llama-stack
 termcolor==2.5.0
     # via
     #   fire
@@ -187,6 +192,7 @@ tqdm==4.67.1
 typing-extensions==4.12.2
     # via
     #   anyio
+    #   fastapi
     #   huggingface-hub
     #   llama-stack-client
     #   multidict
|
Loading…
Add table
Add a link
Reference in a new issue