From 5cc44dac3dc1a81c87e82b60ec6ec4f71ebbce46 Mon Sep 17 00:00:00 2001
From: Ashwin Bharambe
Date: Tue, 15 Jul 2025 20:28:28 -0700
Subject: [PATCH] build: Bump version to 0.2.15

---
 llama_stack/ui/package.json |   2 +-
 pyproject.toml              |   6 +--
 requirements.txt            | 174 ------------------------------------
 3 files changed, 4 insertions(+), 178 deletions(-)

diff --git a/llama_stack/ui/package.json b/llama_stack/ui/package.json
index b38efe309..9c5ff06df 100644
--- a/llama_stack/ui/package.json
+++ b/llama_stack/ui/package.json
@@ -20,7 +20,7 @@
     "@radix-ui/react-tooltip": "^1.2.6",
     "class-variance-authority": "^0.7.1",
     "clsx": "^2.1.1",
-    "llama-stack-client": "^0.2.14",
+    "llama-stack-client": "^0.2.15",
     "lucide-react": "^0.510.0",
     "next": "15.3.3",
     "next-auth": "^4.24.11",
diff --git a/pyproject.toml b/pyproject.toml
index 492058451..b557dfb9d 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "llama_stack"
-version = "0.2.15rc1"
+version = "0.2.15"
 authors = [{ name = "Meta Llama", email = "llama-oss@meta.com" }]
 description = "Llama Stack"
 readme = "README.md"
@@ -28,7 +28,7 @@ dependencies = [
     "huggingface-hub>=0.30.0,<1.0",
     "jinja2>=3.1.6",
     "jsonschema",
-    "llama-stack-client>=0.2.15rc1",
+    "llama-stack-client>=0.2.15",
    "openai>=1.66",
     "prompt-toolkit",
     "python-dotenv",
@@ -52,7 +52,7 @@ dependencies = [
 ui = [
     "streamlit",
     "pandas",
-    "llama-stack-client>=0.2.15rc1",
+    "llama-stack-client>=0.2.15",
     "streamlit-option-menu",
 ]
 
diff --git a/requirements.txt b/requirements.txt
index 8ec138a6f..4505e68bb 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,261 +1,87 @@
 # This file was autogenerated by uv via the following command:
 #    uv export --frozen --no-hashes --no-emit-project --no-default-groups --output-file=requirements.txt
 aiohappyeyeballs==2.5.0
-    # via aiohttp
 aiohttp==3.12.13
-    # via llama-stack
 aiosignal==1.3.2
-    # via aiohttp
 aiosqlite==0.21.0
-    # via llama-stack
 annotated-types==0.7.0
-    # via pydantic
 anyio==4.8.0
-    # via
-    #   httpx
-    #   llama-stack-client
-    #   openai
-    #   starlette
 asyncpg==0.30.0
-    # via llama-stack
 attrs==25.1.0
-    # via
-    #   aiohttp
-    #   jsonschema
-    #   referencing
 certifi==2025.1.31
-    # via
-    #   httpcore
-    #   httpx
-    #   requests
 cffi==1.17.1 ; platform_python_implementation != 'PyPy'
-    # via cryptography
 charset-normalizer==3.4.1
-    # via requests
 click==8.1.8
-    # via
-    #   llama-stack-client
-    #   uvicorn
 colorama==0.4.6 ; sys_platform == 'win32'
-    # via
-    #   click
-    #   tqdm
 cryptography==45.0.5
-    # via python-jose
 deprecated==1.2.18
-    # via
-    #   opentelemetry-api
-    #   opentelemetry-exporter-otlp-proto-http
-    #   opentelemetry-semantic-conventions
 distro==1.9.0
-    # via
-    #   llama-stack-client
-    #   openai
 ecdsa==0.19.1
-    # via python-jose
 fastapi==0.115.8
-    # via llama-stack
 filelock==3.17.0
-    # via huggingface-hub
 fire==0.7.0
-    # via
-    #   llama-stack
-    #   llama-stack-client
 frozenlist==1.5.0
-    # via
-    #   aiohttp
-    #   aiosignal
 fsspec==2024.12.0
-    # via huggingface-hub
 googleapis-common-protos==1.67.0
-    # via opentelemetry-exporter-otlp-proto-http
 h11==0.16.0
-    # via
-    #   httpcore
-    #   llama-stack
-    #   uvicorn
 hf-xet==1.1.5 ; platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'arm64' or platform_machine == 'x86_64'
-    # via huggingface-hub
 httpcore==1.0.9
-    # via httpx
 httpx==0.28.1
-    # via
-    #   llama-stack
-    #   llama-stack-client
-    #   openai
 huggingface-hub==0.33.0
-    # via llama-stack
 idna==3.10
-    # via
-    #   anyio
-    #   httpx
-    #   requests
-    #   yarl
 importlib-metadata==8.5.0
-    # via opentelemetry-api
 jinja2==3.1.6
-    # via llama-stack
 jiter==0.8.2
-    # via openai
 jsonschema==4.23.0
-    # via llama-stack
 jsonschema-specifications==2024.10.1
-    # via jsonschema
 llama-stack-client==0.2.14
-    # via llama-stack
 markdown-it-py==3.0.0
-    # via rich
 markupsafe==3.0.2
-    # via jinja2
 mdurl==0.1.2
-    # via markdown-it-py
 multidict==6.1.0
-    # via
-    #   aiohttp
-    #   yarl
 numpy==2.2.3
-    # via pandas
 openai==1.71.0
-    # via llama-stack
 opentelemetry-api==1.30.0
-    # via
-    #   opentelemetry-exporter-otlp-proto-http
-    #   opentelemetry-sdk
-    #   opentelemetry-semantic-conventions
 opentelemetry-exporter-otlp-proto-common==1.30.0
-    # via opentelemetry-exporter-otlp-proto-http
 opentelemetry-exporter-otlp-proto-http==1.30.0
-    # via llama-stack
 opentelemetry-proto==1.30.0
-    # via
-    #   opentelemetry-exporter-otlp-proto-common
-    #   opentelemetry-exporter-otlp-proto-http
 opentelemetry-sdk==1.30.0
-    # via
-    #   llama-stack
-    #   opentelemetry-exporter-otlp-proto-http
 opentelemetry-semantic-conventions==0.51b0
-    # via opentelemetry-sdk
 packaging==24.2
-    # via huggingface-hub
 pandas==2.2.3
-    # via llama-stack-client
 pillow==11.1.0
-    # via llama-stack
 prompt-toolkit==3.0.50
-    # via
-    #   llama-stack
-    #   llama-stack-client
 propcache==0.3.0
-    # via
-    #   aiohttp
-    #   yarl
 protobuf==5.29.5
-    # via
-    #   googleapis-common-protos
-    #   opentelemetry-proto
 pyaml==25.1.0
-    # via llama-stack-client
 pyasn1==0.4.8
-    # via
-    #   python-jose
-    #   rsa
 pycparser==2.22 ; platform_python_implementation != 'PyPy'
-    # via cffi
 pydantic==2.10.6
-    # via
-    #   fastapi
-    #   llama-stack
-    #   llama-stack-client
-    #   openai
 pydantic-core==2.27.2
-    # via pydantic
 pygments==2.19.1
-    # via rich
 python-dateutil==2.9.0.post0
-    # via pandas
 python-dotenv==1.0.1
-    # via llama-stack
 python-jose==3.4.0
-    # via llama-stack
 python-multipart==0.0.20
-    # via llama-stack
 pytz==2025.1
-    # via pandas
 pyyaml==6.0.2
-    # via
-    #   huggingface-hub
-    #   pyaml
 referencing==0.36.2
-    # via
-    #   jsonschema
-    #   jsonschema-specifications
 regex==2024.11.6
-    # via tiktoken
 requests==2.32.4
-    # via
-    #   huggingface-hub
-    #   llama-stack-client
-    #   opentelemetry-exporter-otlp-proto-http
-    #   tiktoken
 rich==13.9.4
-    # via
-    #   llama-stack
-    #   llama-stack-client
 rpds-py==0.22.3
-    # via
-    #   jsonschema
-    #   referencing
 rsa==4.9
-    # via python-jose
 six==1.17.0
-    # via
-    #   ecdsa
-    #   python-dateutil
 sniffio==1.3.1
-    # via
-    #   anyio
-    #   llama-stack-client
-    #   openai
 starlette==0.45.3
-    # via
-    #   fastapi
-    #   llama-stack
 termcolor==2.5.0
-    # via
-    #   fire
-    #   llama-stack
-    #   llama-stack-client
 tiktoken==0.9.0
-    # via llama-stack
 tqdm==4.67.1
-    # via
-    #   huggingface-hub
-    #   llama-stack-client
-    #   openai
 typing-extensions==4.12.2
-    # via
-    #   aiosqlite
-    #   anyio
-    #   fastapi
-    #   huggingface-hub
-    #   llama-stack-client
-    #   openai
-    #   opentelemetry-sdk
-    #   pydantic
-    #   pydantic-core
-    #   referencing
 tzdata==2025.1
-    # via pandas
 urllib3==2.5.0
-    # via requests
 uvicorn==0.34.0
-    # via llama-stack
 wcwidth==0.2.13
-    # via prompt-toolkit
 wrapt==1.17.2
-    # via deprecated
 yarl==1.18.3
-    # via aiohttp
 zipp==3.21.0
-    # via importlib-metadata