forked from phoenix-oss/llama-stack-mirror
build: Bump version to 0.2.9
commit ad15276da1 (parent 2603f10f95)
4 changed files with 189 additions and 22 deletions
package.json

@@ -19,7 +19,7 @@
     "@radix-ui/react-tooltip": "^1.2.6",
     "class-variance-authority": "^0.7.1",
     "clsx": "^2.1.1",
-    "llama-stack-client": "0.2.8",
+    "llama-stack-client": "0.2.9",
     "lucide-react": "^0.510.0",
     "next": "15.3.2",
     "next-themes": "^0.4.6",
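Note that the npm dependency on llama-stack-client is pinned exactly ("0.2.9", no ^ range), unlike the caret-ranged UI libraries around it, so the web client only moves in lockstep with a deliberate stack release. A minimal sketch of how one might assert that lockstep in CI; the package.json path is an assumption, not taken from this diff:

    # Hypothetical cross-check: the exact npm pin should equal the Python
    # project version. The ui/ path below is assumed, not shown in this diff.
    import json
    import tomllib  # Python 3.11+

    with open("llama_stack/ui/package.json") as f:
        npm_pin = json.load(f)["dependencies"]["llama-stack-client"]
    with open("pyproject.toml", "rb") as f:
        py_version = tomllib.load(f)["project"]["version"]

    assert npm_pin == py_version, f"{npm_pin} != {py_version}"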
pyproject.toml

@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "llama_stack"
-version = "0.2.8"
+version = "0.2.9"
 authors = [{ name = "Meta Llama", email = "llama-oss@meta.com" }]
 description = "Llama Stack"
 readme = "README.md"
@@ -27,7 +27,7 @@ dependencies = [
     "huggingface-hub",
     "jinja2>=3.1.6",
     "jsonschema",
-    "llama-stack-client>=0.2.8",
+    "llama-stack-client>=0.2.9",
     "openai>=1.66",
     "prompt-toolkit",
     "python-dotenv",
@@ -47,7 +47,7 @@ dependencies = [
 ui = [
     "streamlit",
     "pandas",
-    "llama-stack-client>=0.2.8",
+    "llama-stack-client>=0.2.9",
     "streamlit-option-menu",
 ]
 
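After a bump like this, a quick sanity check is that an installed build reports the new version. A minimal sketch, assuming llama-stack 0.2.9 is installed in the current environment:

    # Hedged check: read the installed distribution's version from metadata.
    from importlib.metadata import version

    assert version("llama-stack") == "0.2.9"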
requirements.txt (191 changes)
@@ -1,5 +1,5 @@
 # This file was autogenerated by uv via the following command:
-#    uv export --frozen --no-hashes --no-emit-project --no-default-groups --output-file=requirements.txt
+#    uv export --frozen --no-hashes --no-emit-project --output-file=requirements.txt
 aiohappyeyeballs==2.5.0
     # via aiohttp
 aiohttp==3.11.13
@@ -14,6 +14,10 @@ anyio==4.8.0
     # llama-stack-client
     # openai
     # starlette
+appnope==0.1.4 ; sys_platform == 'darwin'
+    # via ipykernel
+asttokens==3.0.0
+    # via stack-data
 async-timeout==5.0.1 ; python_full_version < '3.11'
     # via aiohttp
 attrs==25.1.0
@@ -21,19 +25,41 @@ attrs==25.1.0
     # aiohttp
     # jsonschema
     # referencing
+black==25.1.0
 certifi==2025.1.31
     # via
     # httpcore
     # httpx
     # requests
+cffi==1.17.1 ; implementation_name == 'pypy'
+    # via pyzmq
+cfgv==3.4.0
+    # via pre-commit
 charset-normalizer==3.4.1
     # via requests
 click==8.1.8
-    # via llama-stack-client
+    # via
+    # black
+    # llama-stack-client
+    # uvicorn
 colorama==0.4.6 ; sys_platform == 'win32'
     # via
     # click
+    # ipython
+    # pytest
     # tqdm
+comm==0.2.2
+    # via ipykernel
+coverage==7.6.12
+    # via
+    # nbval
+    # pytest-cov
+debugpy==1.8.12
+    # via ipykernel
+decorator==5.1.1
+    # via ipython
+distlib==0.3.9
+    # via virtualenv
 distro==1.9.0
     # via
     # llama-stack-client
@@ -41,9 +67,19 @@ distro==1.9.0
 ecdsa==0.19.1
     # via python-jose
 exceptiongroup==1.2.2 ; python_full_version < '3.11'
-    # via anyio
+    # via
+    # anyio
+    # ipython
+    # pytest
+executing==2.2.0
+    # via stack-data
+fastapi==0.115.8
+fastjsonschema==2.21.1
+    # via nbformat
 filelock==3.17.0
-    # via huggingface-hub
+    # via
+    # huggingface-hub
+    # virtualenv
 fire==0.7.0
     # via llama-stack
 frozenlist==1.5.0
@@ -56,6 +92,7 @@ h11==0.16.0
     # via
     # httpcore
     # llama-stack
+    # uvicorn
 httpcore==1.0.9
     # via httpx
 httpx==0.28.1
@@ -65,77 +102,170 @@ httpx==0.28.1
     # openai
 huggingface-hub==0.29.0
     # via llama-stack
+identify==2.6.7
+    # via pre-commit
 idna==3.10
     # via
     # anyio
     # httpx
     # requests
     # yarl
+iniconfig==2.0.0
+    # via pytest
+ipykernel==6.29.5
+    # via nbval
+ipython==8.32.0
+    # via ipykernel
+jedi==0.19.2
+    # via ipython
 jinja2==3.1.6
-    # via llama-stack
+    # via
+    # llama-stack
+    # pytest-html
 jiter==0.8.2
     # via openai
 jsonschema==4.23.0
-    # via llama-stack
+    # via
+    # llama-stack
+    # nbformat
 jsonschema-specifications==2024.10.1
     # via jsonschema
-llama-stack-client==0.2.8
+jupyter-client==8.6.3
+    # via
+    # ipykernel
+    # nbval
+jupyter-core==5.7.2
+    # via
+    # ipykernel
+    # jupyter-client
+    # nbformat
+llama-stack-client==0.2.9
     # via llama-stack
 markdown-it-py==3.0.0
     # via rich
 markupsafe==3.0.2
     # via jinja2
+matplotlib-inline==0.1.7
+    # via
+    # ipykernel
+    # ipython
 mdurl==0.1.2
     # via markdown-it-py
 multidict==6.1.0
     # via
     # aiohttp
     # yarl
+mypy-extensions==1.0.0
+    # via black
+nbformat==5.10.4
+    # via nbval
+nbval==0.11.0
+nest-asyncio==1.6.0
+    # via ipykernel
+nodeenv==1.9.1
+    # via pre-commit
 numpy==2.2.3
     # via pandas
 openai==1.71.0
     # via llama-stack
 packaging==24.2
-    # via huggingface-hub
+    # via
+    # black
+    # huggingface-hub
+    # ipykernel
+    # pytest
 pandas==2.2.3
     # via llama-stack-client
+parso==0.8.4
+    # via jedi
+pathspec==0.12.1
+    # via black
+pexpect==4.9.0 ; sys_platform != 'emscripten' and sys_platform != 'win32'
+    # via ipython
 pillow==11.1.0
     # via llama-stack
+platformdirs==4.3.6
+    # via
+    # black
+    # jupyter-core
+    # virtualenv
+pluggy==1.5.0
+    # via pytest
+pre-commit==4.1.0
 prompt-toolkit==3.0.50
     # via
+    # ipython
     # llama-stack
     # llama-stack-client
 propcache==0.3.0
     # via
     # aiohttp
     # yarl
+psutil==7.0.0
+    # via ipykernel
+ptyprocess==0.7.0 ; sys_platform != 'emscripten' and sys_platform != 'win32'
+    # via pexpect
+pure-eval==0.2.3
+    # via stack-data
 pyaml==25.1.0
     # via llama-stack-client
 pyasn1==0.4.8
     # via
     # python-jose
     # rsa
+pycparser==2.22 ; implementation_name == 'pypy'
+    # via cffi
 pydantic==2.10.6
     # via
+    # fastapi
     # llama-stack
     # llama-stack-client
     # openai
 pydantic-core==2.27.2
     # via pydantic
 pygments==2.19.1
-    # via rich
+    # via
+    # ipython
+    # rich
+pytest==8.3.4
+    # via
+    # nbval
+    # pytest-asyncio
+    # pytest-cov
+    # pytest-html
+    # pytest-json-report
+    # pytest-metadata
+    # pytest-timeout
+pytest-asyncio==0.25.3
+pytest-cov==6.0.0
+pytest-html==4.1.1
+pytest-json-report==1.5.0
+pytest-metadata==3.1.1
+    # via
+    # pytest-html
+    # pytest-json-report
+pytest-timeout==2.4.0
 python-dateutil==2.9.0.post0
-    # via pandas
+    # via
+    # jupyter-client
+    # pandas
 python-dotenv==1.0.1
     # via llama-stack
 python-jose==3.4.0
     # via llama-stack
 pytz==2025.1
     # via pandas
+pywin32==308 ; platform_python_implementation != 'PyPy' and sys_platform == 'win32'
+    # via jupyter-core
 pyyaml==6.0.2
     # via
     # huggingface-hub
+    # pre-commit
     # pyaml
+pyzmq==26.2.1
+    # via
+    # ipykernel
+    # jupyter-client
 referencing==0.36.2
     # via
     # jsonschema
@@ -157,6 +287,10 @@ rpds-py==0.22.3
     # referencing
 rsa==4.9
     # via python-jose
+ruamel-yaml==0.18.10
+ruamel-yaml-clib==0.2.12 ; python_full_version < '3.13' and platform_python_implementation == 'CPython'
+    # via ruamel-yaml
+ruff==0.9.6
 setuptools==80.8.0
     # via llama-stack
 six==1.17.0
@@ -168,8 +302,12 @@ sniffio==1.3.1
     # anyio
     # llama-stack-client
     # openai
+stack-data==0.6.3
+    # via ipython
 starlette==0.45.3
-    # via llama-stack
+    # via
+    # fastapi
+    # llama-stack
 termcolor==2.5.0
     # via
     # fire
@@ -177,15 +315,38 @@ termcolor==2.5.0
     # llama-stack-client
 tiktoken==0.9.0
     # via llama-stack
+tomli==2.2.1 ; python_full_version <= '3.11'
+    # via
+    # black
+    # coverage
+    # pytest
+tornado==6.4.2
+    # via
+    # ipykernel
+    # jupyter-client
 tqdm==4.67.1
     # via
     # huggingface-hub
     # llama-stack-client
     # openai
+traitlets==5.14.3
+    # via
+    # comm
+    # ipykernel
+    # ipython
+    # jupyter-client
+    # jupyter-core
+    # matplotlib-inline
+    # nbformat
+types-requests==2.32.0.20241016
+types-setuptools==75.8.0.20250210
 typing-extensions==4.12.2
     # via
     # anyio
+    # black
+    # fastapi
     # huggingface-hub
+    # ipython
     # llama-stack-client
     # multidict
     # openai
@@ -193,10 +354,16 @@ typing-extensions==4.12.2
     # pydantic-core
     # referencing
     # rich
+    # uvicorn
 tzdata==2025.1
     # via pandas
 urllib3==2.3.0
-    # via requests
+    # via
+    # requests
+    # types-requests
+uvicorn==0.34.0
+virtualenv==20.29.2
+    # via pre-commit
 wcwidth==0.2.13
     # via prompt-toolkit
 yarl==1.18.3
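Most of this file's additions follow from the command change in the first hunk: dropping --no-default-groups makes uv export the default dependency groups as well, which pulls dev tooling (black, pre-commit, pytest and its plugins, the ipykernel/nbval stack) into requirements.txt. A minimal sketch for confirming that only such group dependencies were added; the two file names are hypothetical, standing in for the old and new exports:

    # Hypothetical helper: list packages present in the new export but not the
    # old one, skipping the indented "# via" annotation lines uv emits.
    def packages(path: str) -> set[str]:
        with open(path) as f:
            return {
                line.split("==")[0]
                for line in f
                if line.strip() and not line.lstrip().startswith("#")
            }

    added = packages("requirements.new.txt") - packages("requirements.old.txt")
    print(sorted(added))  # expect dev-group tools such as black, pytest, ipykernel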
uv.lock (12 changes, generated)
@@ -1453,7 +1453,7 @@ sdist = { url = "https://files.pythonhosted.org/packages/65/c6/246100fa3967074d9
 
 [[package]]
 name = "llama-stack"
-version = "0.2.8"
+version = "0.2.9"
 source = { editable = "." }
 dependencies = [
     { name = "aiohttp" },
@@ -1566,8 +1566,8 @@ requires-dist = [
     { name = "huggingface-hub" },
     { name = "jinja2", specifier = ">=3.1.6" },
     { name = "jsonschema" },
-    { name = "llama-stack-client", specifier = ">=0.2.8" },
-    { name = "llama-stack-client", marker = "extra == 'ui'", specifier = ">=0.2.8" },
+    { name = "llama-stack-client", specifier = ">=0.2.9" },
+    { name = "llama-stack-client", marker = "extra == 'ui'", specifier = ">=0.2.9" },
     { name = "openai", specifier = ">=1.66" },
     { name = "pandas", marker = "extra == 'ui'" },
     { name = "pillow" },
@@ -1659,7 +1659,7 @@ unit = [
 
 [[package]]
 name = "llama-stack-client"
-version = "0.2.8"
+version = "0.2.9"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "anyio" },
@@ -1676,9 +1676,9 @@ dependencies = [
     { name = "tqdm" },
     { name = "typing-extensions" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/61/2e/c29ad9af892c0412b923b6fd5423fae7f1912444edb929c035b93f0540cd/llama_stack_client-0.2.8.tar.gz", hash = "sha256:40cc14ec9ad37969249d972abd681c925f3d4866fc4fa75a016ef1fe87cc7a40", size = 269661, upload-time = "2025-05-27T20:27:55.935Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/91/86/80c2d87db0c4320c4e1cfb616d92c823e67d04b7cff41b32a14f188c0e43/llama_stack_client-0.2.9.tar.gz", hash = "sha256:529c24e17d3f2d57abc619c88b9989bf2a96dc1c90c3a80c5361e25a798e7729", size = 269660, upload-time = "2025-05-30T19:42:43.175Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/28/57/71ee1ad3a3e02dd1d52284a811c5b2caf72f4b704a9177bcf56e6114b56c/llama_stack_client-0.2.8-py3-none-any.whl", hash = "sha256:80b95d136b9a6e39a859578ad7d9536662c564d7f6483e45b010708608e4768d", size = 307589, upload-time = "2025-05-27T20:27:54.153Z" },
+    { url = "https://files.pythonhosted.org/packages/69/ca/f251a9c4e81cabc3f8f8c737db81cce0d9785b2d547627ddbb3563c02c26/llama_stack_client-0.2.9-py3-none-any.whl", hash = "sha256:20eca8fc6f6c2e3f7a20fa2cf45782a3c5dc20d430746d563de4966af6cd42e9", size = 307590, upload-time = "2025-05-30T19:42:41.855Z" },
 ]
 
 [[package]]
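The lock entry records the sdist URL, sha256, and size for the new client release, which is what makes the resolution reproducible. A minimal sketch (not part of the commit) that re-verifies the recorded digest; the URL and hash are copied from the entry above, and network access is assumed:

    # Re-verify the uv.lock sha256 for the llama_stack_client 0.2.9 sdist.
    import hashlib
    import urllib.request

    URL = ("https://files.pythonhosted.org/packages/91/86/"
           "80c2d87db0c4320c4e1cfb616d92c823e67d04b7cff41b32a14f188c0e43/"
           "llama_stack_client-0.2.9.tar.gz")
    EXPECTED = "529c24e17d3f2d57abc619c88b9989bf2a96dc1c90c3a80c5361e25a798e7729"

    data = urllib.request.urlopen(URL).read()
    digest = hashlib.sha256(data).hexdigest()
    assert digest == EXPECTED, f"hash mismatch: {digest}"
    print(f"ok, {len(data)} bytes")  # the lock entry records size = 269660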