From 033c1abf29a3fd07676c3a7785445b240b6d553a Mon Sep 17 00:00:00 2001
From: "github-actions[bot]"
Date: Fri, 31 Oct 2025 22:54:10 +0000
Subject: [PATCH] build: Bump version to 0.3.1

---
 llama_stack/ui/package.json | 2 +-
 pyproject.toml              | 6 +++---
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/llama_stack/ui/package.json b/llama_stack/ui/package.json
index 9350be16a..1e5c44292 100644
--- a/llama_stack/ui/package.json
+++ b/llama_stack/ui/package.json
@@ -23,7 +23,7 @@
     "class-variance-authority": "^0.7.1",
     "clsx": "^2.1.1",
     "framer-motion": "^12.23.24",
-    "llama-stack-client": "^0.3.0",
+    "llama-stack-client": "^0.3.1",
     "lucide-react": "^0.545.0",
     "next": "15.5.4",
     "next-auth": "^4.24.11",
diff --git a/pyproject.toml b/pyproject.toml
index a2f42cc62..d8b5e9276 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -7,7 +7,7 @@ required-version = ">=0.7.0"
 
 [project]
 name = "llama_stack"
-version = "0.3.1rc5"
+version = "0.3.1"
 authors = [{ name = "Meta Llama", email = "llama-oss@meta.com" }]
 description = "Llama Stack"
 readme = "README.md"
@@ -30,7 +30,7 @@ dependencies = [
     "httpx",
     "jinja2>=3.1.6",
     "jsonschema",
-    "llama-stack-client>=0.3.1rc5",
+    "llama-stack-client>=0.3.1",
     "openai>=1.107", # for expires_after support
     "prompt-toolkit",
     "python-dotenv",
@@ -55,7 +55,7 @@ dependencies = [
 ui = [
     "streamlit",
     "pandas",
-    "llama-stack-client>=0.3.1rc5",
+    "llama-stack-client>=0.3.1",
     "streamlit-option-menu",
 ]