From e565b911828d3f0902e141e8349d39f99ed382c6 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Tue, 5 Aug 2025 01:43:30 +0000 Subject: [PATCH] build: Bump version to 0.2.17 --- llama_stack/ui/package.json | 2 +- pyproject.toml | 6 +++--- uv.lock | 12 ++++++------ 3 files changed, 10 insertions(+), 10 deletions(-) diff --git a/llama_stack/ui/package.json b/llama_stack/ui/package.json index 7f1dad647..f7e2758c9 100644 --- a/llama_stack/ui/package.json +++ b/llama_stack/ui/package.json @@ -23,7 +23,7 @@ "class-variance-authority": "^0.7.1", "clsx": "^2.1.1", "framer-motion": "^11.18.2", - "llama-stack-client": "0.2.16", + "llama-stack-client": "0.2.17", "lucide-react": "^0.510.0", "next": "15.3.3", "next-auth": "^4.24.11", diff --git a/pyproject.toml b/pyproject.toml index efebd956a..e4932a916 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "llama_stack" -version = "0.2.16" +version = "0.2.17" authors = [{ name = "Meta Llama", email = "llama-oss@meta.com" }] description = "Llama Stack" readme = "README.md" @@ -28,7 +28,7 @@ dependencies = [ "huggingface-hub>=0.34.0,<1.0", "jinja2>=3.1.6", "jsonschema", - "llama-stack-client>=0.2.16", + "llama-stack-client>=0.2.17", "llama-api-client>=0.1.2", "openai>=1.66", "prompt-toolkit", @@ -53,7 +53,7 @@ dependencies = [ ui = [ "streamlit", "pandas", - "llama-stack-client>=0.2.16", + "llama-stack-client>=0.2.17", "streamlit-option-menu", ] diff --git a/uv.lock b/uv.lock index a5d2c4fd5..c10a7962c 100644 --- a/uv.lock +++ b/uv.lock @@ -1540,7 +1540,7 @@ wheels = [ [[package]] name = "llama-stack" -version = "0.2.16" +version = "0.2.17" source = { editable = "." 
} dependencies = [ { name = "aiohttp" }, @@ -1672,8 +1672,8 @@ requires-dist = [ { name = "jinja2", specifier = ">=3.1.6" }, { name = "jsonschema" }, { name = "llama-api-client", specifier = ">=0.1.2" }, - { name = "llama-stack-client", specifier = ">=0.2.16" }, - { name = "llama-stack-client", marker = "extra == 'ui'", specifier = ">=0.2.16" }, + { name = "llama-stack-client", specifier = ">=0.2.17" }, + { name = "llama-stack-client", marker = "extra == 'ui'", specifier = ">=0.2.17" }, { name = "openai", specifier = ">=1.66" }, { name = "opentelemetry-exporter-otlp-proto-http", specifier = ">=1.30.0" }, { name = "opentelemetry-sdk", specifier = ">=1.30.0" }, @@ -1776,7 +1776,7 @@ unit = [ [[package]] name = "llama-stack-client" -version = "0.2.16" +version = "0.2.17" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, @@ -1795,9 +1795,9 @@ dependencies = [ { name = "tqdm" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/db/28/74ae2faae9af51205587b33fcf2f99a8af090de7aa4122701f2f70f04233/llama_stack_client-0.2.16.tar.gz", hash = "sha256:24294acc6bf40e79900a62f4fa61009acb9af7028b198b12c0ba8adab25c2049", size = 257642, upload-time = "2025-07-28T23:13:22.793Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c5/2a/bb2949d6a5c494d21da0c185d426e25eaa8016f8287b689249afc6c96fb5/llama_stack_client-0.2.17.tar.gz", hash = "sha256:1fe2070133c6356761e394fa346045e9b6b567d4c63157b9bc6be89b9a6e7a41", size = 257636, upload-time = "2025-08-05T01:42:55.911Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/30/ec/1874120a15b22f3a88d4e49700c870cc6540bc8c709a841db79a662d7949/llama_stack_client-0.2.16-py3-none-any.whl", hash = "sha256:5c0d13e6ac40143ce01cae4eec65fb39fe24e11f54b86afbd20f0033c38f83c0", size = 350329, upload-time = "2025-07-28T23:13:21.586Z" }, + { url = 
"https://files.pythonhosted.org/packages/81/fc/5eccc86b83c5ced3a3bca071d250a86ccafa4ff17546cf781deb7758ab74/llama_stack_client-0.2.17-py3-none-any.whl", hash = "sha256:336c32f8688700ff64717b8109f405dc87a990fbe310c2027ac9ed6d39d67d16", size = 350329, upload-time = "2025-08-05T01:42:54.381Z" }, ] [[package]]