From 44bd820f04a0860ce553b9a7ca0b34cfc73b5bee Mon Sep 17 00:00:00 2001
From: "github-actions[bot]"
Date: Fri, 29 Aug 2025 21:09:54 +0000
Subject: [PATCH] build: Bump version to 0.2.20

---
 llama_stack/ui/package.json | 2 +-
 pyproject.toml              | 6 +++---
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/llama_stack/ui/package.json b/llama_stack/ui/package.json
index 31c836057..a9c56f98e 100644
--- a/llama_stack/ui/package.json
+++ b/llama_stack/ui/package.json
@@ -23,7 +23,7 @@
     "class-variance-authority": "^0.7.1",
     "clsx": "^2.1.1",
     "framer-motion": "^11.18.2",
-    "llama-stack-client": "^0.2.19",
+    "llama-stack-client": "^0.2.20",
     "lucide-react": "^0.510.0",
     "next": "15.3.3",
     "next-auth": "^4.24.11",
diff --git a/pyproject.toml b/pyproject.toml
index 0e1de89eb..aa1813e49 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -7,7 +7,7 @@ required-version = ">=0.7.0"

 [project]
 name = "llama_stack"
-version = "0.2.20rc1"
+version = "0.2.20"
 authors = [{ name = "Meta Llama", email = "llama-oss@meta.com" }]
 description = "Llama Stack"
 readme = "README.md"
@@ -31,7 +31,7 @@ dependencies = [
   "huggingface-hub>=0.34.0,<1.0",
   "jinja2>=3.1.6",
   "jsonschema",
-  "llama-stack-client>=0.2.20rc1",
+  "llama-stack-client>=0.2.20",
   "llama-api-client>=0.1.2",
   "openai>=1.99.6,<1.100.0",
   "prompt-toolkit",
@@ -56,7 +56,7 @@ dependencies = [
 ui = [
   "streamlit",
   "pandas",
-  "llama-stack-client>=0.2.20rc1",
+  "llama-stack-client>=0.2.20",
   "streamlit-option-menu",
 ]