From bf091306feae47ac555794dec7c083cd7cd54787 Mon Sep 17 00:00:00 2001
From: "github-actions[bot]"
Date: Tue, 21 Oct 2025 23:58:10 +0000
Subject: [PATCH] build: Bump version to 0.3.0

---
 llama_stack/ui/package.json | 2 +-
 pyproject.toml              | 6 +++---
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/llama_stack/ui/package.json b/llama_stack/ui/package.json
index 07b3465e5..9350be16a 100644
--- a/llama_stack/ui/package.json
+++ b/llama_stack/ui/package.json
@@ -23,7 +23,7 @@
     "class-variance-authority": "^0.7.1",
     "clsx": "^2.1.1",
     "framer-motion": "^12.23.24",
-    "llama-stack-client": "^0.2.23",
+    "llama-stack-client": "^0.3.0",
     "lucide-react": "^0.545.0",
     "next": "15.5.4",
     "next-auth": "^4.24.11",
diff --git a/pyproject.toml b/pyproject.toml
index 7b014fb1f..741dd17e5 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -7,7 +7,7 @@ required-version = ">=0.7.0"
 
 [project]
 name = "llama_stack"
-version = "0.3.0rc6"
+version = "0.3.0"
 authors = [{ name = "Meta Llama", email = "llama-oss@meta.com" }]
 description = "Llama Stack"
 readme = "README.md"
@@ -30,7 +30,7 @@ dependencies = [
     "httpx",
     "jinja2>=3.1.6",
     "jsonschema",
-    "llama-stack-client>=0.3.0rc6",
+    "llama-stack-client>=0.3.0",
     "openai>=1.107", # for expires_after support
     "prompt-toolkit",
     "python-dotenv",
@@ -55,7 +55,7 @@ dependencies = [
 ui = [
     "streamlit",
     "pandas",
-    "llama-stack-client>=0.3.0rc6",
+    "llama-stack-client>=0.3.0",
     "streamlit-option-menu",
 ]