From 5adc8bb8b148f8d0f1cf91251cca6f4bb3452271 Mon Sep 17 00:00:00 2001
From: "github-actions[bot]"
Date: Fri, 20 Jun 2025 21:05:18 +0000
Subject: [PATCH] build: Bump version to 0.2.12

---
 llama_stack/ui/package.json | 2 +-
 pyproject.toml              | 6 +++---
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/llama_stack/ui/package.json b/llama_stack/ui/package.json
index e9814663a..af9165256 100644
--- a/llama_stack/ui/package.json
+++ b/llama_stack/ui/package.json
@@ -20,7 +20,7 @@
     "@radix-ui/react-tooltip": "^1.2.6",
     "class-variance-authority": "^0.7.1",
     "clsx": "^2.1.1",
-    "llama-stack-client": "0.2.11",
+    "llama-stack-client": "0.2.12",
     "lucide-react": "^0.510.0",
     "next": "15.3.2",
     "next-themes": "^0.4.6",
diff --git a/pyproject.toml b/pyproject.toml
index 437d8ad3b..9e0d7f0af 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "llama_stack"
-version = "0.2.12rc5"
+version = "0.2.12"
 authors = [{ name = "Meta Llama", email = "llama-oss@meta.com" }]
 description = "Llama Stack"
 readme = "README.md"
@@ -28,7 +28,7 @@ dependencies = [
     "huggingface-hub",
     "jinja2>=3.1.6",
     "jsonschema",
-    "llama-stack-client>=0.2.12rc5",
+    "llama-stack-client>=0.2.12",
     "openai>=1.66",
     "prompt-toolkit",
     "python-dotenv",
@@ -49,7 +49,7 @@ dependencies = [
 ui = [
     "streamlit",
     "pandas",
-    "llama-stack-client>=0.2.12rc5",
+    "llama-stack-client>=0.2.12",
     "streamlit-option-menu",
 ]