diff --git a/llama_stack/ui/package.json b/llama_stack/ui/package.json
index e961595cc..c612a8078 100644
--- a/llama_stack/ui/package.json
+++ b/llama_stack/ui/package.json
@@ -19,7 +19,7 @@
     "@radix-ui/react-tooltip": "^1.2.6",
     "class-variance-authority": "^0.7.1",
     "clsx": "^2.1.1",
-    "llama-stack-client": "0.2.8",
+    "llama-stack-client": "0.2.9",
     "lucide-react": "^0.510.0",
     "next": "15.3.2",
     "next-themes": "^0.4.6",
diff --git a/pyproject.toml b/pyproject.toml
index 2bb6292aa..f79857c3d 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "llama_stack"
-version = "0.2.8"
+version = "0.2.9"
 authors = [{ name = "Meta Llama", email = "llama-oss@meta.com" }]
 description = "Llama Stack"
 readme = "README.md"
@@ -27,7 +27,7 @@ dependencies = [
     "huggingface-hub",
     "jinja2>=3.1.6",
     "jsonschema",
-    "llama-stack-client>=0.2.8",
+    "llama-stack-client>=0.2.9",
     "openai>=1.66",
     "prompt-toolkit",
     "python-dotenv",
@@ -47,7 +47,7 @@ dependencies = [
 
 ui = [
     "streamlit",
     "pandas",
-    "llama-stack-client>=0.2.8",
+    "llama-stack-client>=0.2.9",
     "streamlit-option-menu",
 ]
diff --git a/requirements.txt b/requirements.txt
index 0c079a855..d2e4c1005 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,5 +1,5 @@
 # This file was autogenerated by uv via the following command:
-#    uv export --frozen --no-hashes --no-emit-project --no-default-groups --output-file=requirements.txt
+#    uv export --frozen --no-hashes --no-emit-project --output-file=requirements.txt
 aiohappyeyeballs==2.5.0
     # via aiohttp
 aiohttp==3.11.13
@@ -14,6 +14,10 @@ anyio==4.8.0
     #   llama-stack-client
     #   openai
     #   starlette
+appnope==0.1.4 ; sys_platform == 'darwin'
+    # via ipykernel
+asttokens==3.0.0
+    # via stack-data
 async-timeout==5.0.1 ; python_full_version < '3.11'
     # via aiohttp
 attrs==25.1.0
@@ -21,19 +25,41 @@ attrs==25.1.0
     #   aiohttp
     #   jsonschema
     #   referencing
+black==25.1.0
 certifi==2025.1.31
     # via
     #   httpcore
     #   httpx
     #   requests
+cffi==1.17.1 ; implementation_name == 'pypy'
+    # via pyzmq
+cfgv==3.4.0
+    # via pre-commit
 charset-normalizer==3.4.1
     # via requests
 click==8.1.8
-    # via llama-stack-client
+    # via
+    #   black
+    #   llama-stack-client
+    #   uvicorn
 colorama==0.4.6 ; sys_platform == 'win32'
     # via
     #   click
+    #   ipython
+    #   pytest
     #   tqdm
+comm==0.2.2
+    # via ipykernel
+coverage==7.6.12
+    # via
+    #   nbval
+    #   pytest-cov
+debugpy==1.8.12
+    # via ipykernel
+decorator==5.1.1
+    # via ipython
+distlib==0.3.9
+    # via virtualenv
 distro==1.9.0
     # via
     #   llama-stack-client
@@ -41,9 +67,19 @@ distro==1.9.0
 ecdsa==0.19.1
     # via python-jose
 exceptiongroup==1.2.2 ; python_full_version < '3.11'
-    # via anyio
+    # via
+    #   anyio
+    #   ipython
+    #   pytest
+executing==2.2.0
+    # via stack-data
+fastapi==0.115.8
+fastjsonschema==2.21.1
+    # via nbformat
 filelock==3.17.0
-    # via huggingface-hub
+    # via
+    #   huggingface-hub
+    #   virtualenv
 fire==0.7.0
     # via llama-stack
 frozenlist==1.5.0
@@ -56,6 +92,7 @@ h11==0.16.0
     # via
     #   httpcore
     #   llama-stack
+    #   uvicorn
 httpcore==1.0.9
     # via httpx
 httpx==0.28.1
@@ -65,77 +102,170 @@ httpx==0.28.1
     # via
     #   llama-stack
     #   llama-stack-client
     #   openai
 huggingface-hub==0.29.0
     # via llama-stack
+identify==2.6.7
+    # via pre-commit
 idna==3.10
     # via
     #   anyio
     #   httpx
     #   requests
     #   yarl
+iniconfig==2.0.0
+    # via pytest
+ipykernel==6.29.5
+    # via nbval
+ipython==8.32.0
+    # via ipykernel
+jedi==0.19.2
+    # via ipython
 jinja2==3.1.6
-    # via llama-stack
+    # via
+    #   llama-stack
+    #   pytest-html
 jiter==0.8.2
     # via openai
 jsonschema==4.23.0
-    # via llama-stack
+    # via
+    #   llama-stack
+    #   nbformat
 jsonschema-specifications==2024.10.1
     # via jsonschema
-llama-stack-client==0.2.8
+jupyter-client==8.6.3
+    # via
+    #   ipykernel
+    #   nbval
+jupyter-core==5.7.2
+    # via
+    #   ipykernel
+    #   jupyter-client
+    #   nbformat
+llama-stack-client==0.2.9
     # via llama-stack
 markdown-it-py==3.0.0
     # via rich
 markupsafe==3.0.2
     # via jinja2
+matplotlib-inline==0.1.7
+    # via
+    #   ipykernel
+    #   ipython
 mdurl==0.1.2
     # via markdown-it-py
 multidict==6.1.0
     # via
     #   aiohttp
     #   yarl
+mypy-extensions==1.0.0
+    # via black
+nbformat==5.10.4
+    # via nbval
+nbval==0.11.0
+nest-asyncio==1.6.0
+    # via ipykernel
+nodeenv==1.9.1
+    # via pre-commit
 numpy==2.2.3
     # via pandas
 openai==1.71.0
     # via llama-stack
 packaging==24.2
-    # via huggingface-hub
+    # via
+    #   black
+    #   huggingface-hub
+    #   ipykernel
+    #   pytest
 pandas==2.2.3
     # via llama-stack-client
+parso==0.8.4
+    # via jedi
+pathspec==0.12.1
+    # via black
+pexpect==4.9.0 ; sys_platform != 'emscripten' and sys_platform != 'win32'
+    # via ipython
 pillow==11.1.0
     # via llama-stack
+platformdirs==4.3.6
+    # via
+    #   black
+    #   jupyter-core
+    #   virtualenv
+pluggy==1.5.0
+    # via pytest
+pre-commit==4.1.0
 prompt-toolkit==3.0.50
     # via
+    #   ipython
     #   llama-stack
     #   llama-stack-client
 propcache==0.3.0
     # via
     #   aiohttp
     #   yarl
+psutil==7.0.0
+    # via ipykernel
+ptyprocess==0.7.0 ; sys_platform != 'emscripten' and sys_platform != 'win32'
+    # via pexpect
+pure-eval==0.2.3
+    # via stack-data
 pyaml==25.1.0
     # via llama-stack-client
 pyasn1==0.4.8
     # via
     #   python-jose
     #   rsa
+pycparser==2.22 ; implementation_name == 'pypy'
+    # via cffi
 pydantic==2.10.6
     # via
+    #   fastapi
     #   llama-stack
     #   llama-stack-client
     #   openai
 pydantic-core==2.27.2
     # via pydantic
 pygments==2.19.1
-    # via rich
+    # via
+    #   ipython
+    #   rich
+pytest==8.3.4
+    # via
+    #   nbval
+    #   pytest-asyncio
+    #   pytest-cov
+    #   pytest-html
+    #   pytest-json-report
+    #   pytest-metadata
+    #   pytest-timeout
+pytest-asyncio==0.25.3
+pytest-cov==6.0.0
+pytest-html==4.1.1
+pytest-json-report==1.5.0
+pytest-metadata==3.1.1
+    # via
+    #   pytest-html
+    #   pytest-json-report
+pytest-timeout==2.4.0
 python-dateutil==2.9.0.post0
-    # via pandas
+    # via
+    #   jupyter-client
+    #   pandas
 python-dotenv==1.0.1
     # via llama-stack
 python-jose==3.4.0
     # via llama-stack
 pytz==2025.1
     # via pandas
+pywin32==308 ; platform_python_implementation != 'PyPy' and sys_platform == 'win32'
+    # via jupyter-core
 pyyaml==6.0.2
     # via
     #   huggingface-hub
+    #   pre-commit
     #   pyaml
+pyzmq==26.2.1
+    # via
+    #   ipykernel
+    #   jupyter-client
 referencing==0.36.2
     # via
     #   jsonschema
@@ -157,6 +287,10 @@ rpds-py==0.22.3
     #   referencing
 rsa==4.9
     # via python-jose
+ruamel-yaml==0.18.10
+ruamel-yaml-clib==0.2.12 ; python_full_version < '3.13' and platform_python_implementation == 'CPython'
+    # via ruamel-yaml
+ruff==0.9.6
 setuptools==80.8.0
     # via llama-stack
 six==1.17.0
@@ -168,8 +302,12 @@ sniffio==1.3.1
     #   anyio
     #   llama-stack-client
     #   openai
+stack-data==0.6.3
+    # via ipython
 starlette==0.45.3
-    # via llama-stack
+    # via
+    #   fastapi
+    #   llama-stack
 termcolor==2.5.0
     # via
     #   fire
@@ -177,15 +315,38 @@ termcolor==2.5.0
     #   llama-stack
     #   llama-stack-client
 tiktoken==0.9.0
     # via llama-stack
+tomli==2.2.1 ; python_full_version <= '3.11'
+    # via
+    #   black
+    #   coverage
+    #   pytest
+tornado==6.4.2
+    # via
+    #   ipykernel
+    #   jupyter-client
 tqdm==4.67.1
     # via
     #   huggingface-hub
     #   llama-stack-client
     #   openai
+traitlets==5.14.3
+    # via
+    #   comm
+    #   ipykernel
+    #   ipython
+    #   jupyter-client
+    #   jupyter-core
+    #   matplotlib-inline
+    #   nbformat
+types-requests==2.32.0.20241016
+types-setuptools==75.8.0.20250210
 typing-extensions==4.12.2
     # via
     #   anyio
+    #   black
+    #   fastapi
     #   huggingface-hub
+    #   ipython
     #   llama-stack-client
     #   multidict
     #   openai
@@ -193,10 +354,16 @@ typing-extensions==4.12.2
     #   pydantic-core
     #   referencing
     #   rich
+    #   uvicorn
 tzdata==2025.1
     # via pandas
 urllib3==2.3.0
-    # via requests
+    # via
+    #   requests
+    #   types-requests
+uvicorn==0.34.0
+virtualenv==20.29.2
+    # via pre-commit
 wcwidth==0.2.13
     # via prompt-toolkit
 yarl==1.18.3
diff --git a/uv.lock b/uv.lock
index dae04b5f6..f67cb0656 100644
--- a/uv.lock
+++ b/uv.lock
@@ -1453,7 +1453,7 @@ sdist = { url = "https://files.pythonhosted.org/packages/65/c6/246100fa3967074d9
 
 [[package]]
 name = "llama-stack"
-version = "0.2.8"
+version = "0.2.9"
 source = { editable = "." }
 dependencies = [
     { name = "aiohttp" },
@@ -1566,8 +1566,8 @@ requires-dist = [
     { name = "huggingface-hub" },
     { name = "jinja2", specifier = ">=3.1.6" },
     { name = "jsonschema" },
-    { name = "llama-stack-client", specifier = ">=0.2.8" },
-    { name = "llama-stack-client", marker = "extra == 'ui'", specifier = ">=0.2.8" },
+    { name = "llama-stack-client", specifier = ">=0.2.9" },
+    { name = "llama-stack-client", marker = "extra == 'ui'", specifier = ">=0.2.9" },
     { name = "openai", specifier = ">=1.66" },
     { name = "pandas", marker = "extra == 'ui'" },
     { name = "pillow" },
@@ -1659,7 +1659,7 @@ unit = [
 
 [[package]]
 name = "llama-stack-client"
-version = "0.2.8"
+version = "0.2.9"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "anyio" },
     { name = "click" },
     { name = "distro" },
     { name = "httpx" },
     { name = "pandas" },
     { name = "prompt-toolkit" },
     { name = "pyaml" },
     { name = "pydantic" },
     { name = "requests" },
     { name = "rich" },
     { name = "sniffio" },
     { name = "termcolor" },
     { name = "tqdm" },
     { name = "typing-extensions" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/61/2e/c29ad9af892c0412b923b6fd5423fae7f1912444edb929c035b93f0540cd/llama_stack_client-0.2.8.tar.gz", hash = "sha256:40cc14ec9ad37969249d972abd681c925f3d4866fc4fa75a016ef1fe87cc7a40", size = 269661, upload-time = "2025-05-27T20:27:55.935Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/91/86/80c2d87db0c4320c4e1cfb616d92c823e67d04b7cff41b32a14f188c0e43/llama_stack_client-0.2.9.tar.gz", hash = "sha256:529c24e17d3f2d57abc619c88b9989bf2a96dc1c90c3a80c5361e25a798e7729", size = 269660, upload-time = "2025-05-30T19:42:43.175Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/28/57/71ee1ad3a3e02dd1d52284a811c5b2caf72f4b704a9177bcf56e6114b56c/llama_stack_client-0.2.8-py3-none-any.whl", hash = "sha256:80b95d136b9a6e39a859578ad7d9536662c564d7f6483e45b010708608e4768d", size = 307589, upload-time = "2025-05-27T20:27:54.153Z" },
+    { url = "https://files.pythonhosted.org/packages/69/ca/f251a9c4e81cabc3f8f8c737db81cce0d9785b2d547627ddbb3563c02c26/llama_stack_client-0.2.9-py3-none-any.whl", hash = "sha256:20eca8fc6f6c2e3f7a20fa2cf45782a3c5dc20d430746d563de4966af6cd42e9", size = 307590, upload-time = "2025-05-30T19:42:41.855Z" },
 ]
 
 [[package]]
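
Note on the requirements.txt hunk: the large block of new packages (pytest and its plugins, ipykernel/nbval, pre-commit, black, ruff, and friends) is a side effect of the regeneration command change recorded in the file's own header, not of hand-edits. Dropping uv's --no-default-groups flag makes `uv export` emit the default dependency groups (the dev tooling) alongside the runtime dependencies. Assuming a checkout containing this uv.lock, the file is reproduced with the command from the new header line:

    uv export --frozen --no-hashes --no-emit-project --output-file=requirements.txt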