build: Bump version to 0.3.2

github-actions[bot] 2025-11-12 23:19:12 +00:00
parent dbef00de28
commit 1536b8e890
3 changed files with 8 additions and 8 deletions

package-lock.json

@@ -18,7 +18,7 @@
 "class-variance-authority": "^0.7.1",
 "clsx": "^2.1.1",
 "framer-motion": "^12.23.24",
-"llama-stack-client": "^0.3.1",
+"llama-stack-client": "^0.3.2",
 "lucide-react": "^0.545.0",
 "next": "15.5.4",
 "next-auth": "^4.24.11",
@@ -9635,9 +9635,9 @@
 "license": "MIT"
 },
 "node_modules/llama-stack-client": {
-"version": "0.3.1",
-"resolved": "https://registry.npmjs.org/llama-stack-client/-/llama-stack-client-0.3.1.tgz",
-"integrity": "sha512-4aYoF2aAQiBSfxyZEtczeQmJn8q9T22ePDqGhR+ej5RG6a8wvl5B3v7ZoKuFkft+vcP/kbJ58GQZEPLekxekZA==",
+"version": "0.3.2",
+"resolved": "https://registry.npmjs.org/llama-stack-client/-/llama-stack-client-0.3.2.tgz",
+"integrity": "sha512-vzcnIN6k3sp7dhMXSnyrPSd82ACH/H3snj2uF6DgZwZCacKQNp2Y5XIT5qZZgoM1EUXbaxdVYFCeWD9yNCwatw==",
 "license": "MIT",
 "dependencies": {
 "@types/node": "^18.11.18",

package.json

@@ -23,7 +23,7 @@
 "class-variance-authority": "^0.7.1",
 "clsx": "^2.1.1",
 "framer-motion": "^12.23.24",
-"llama-stack-client": "^0.3.1",
+"llama-stack-client": "^0.3.2",
 "lucide-react": "^0.545.0",
 "next": "15.5.4",
 "next-auth": "^4.24.11",

pyproject.toml

@@ -7,7 +7,7 @@ required-version = ">=0.7.0"
 [project]
 name = "llama_stack"
-version = "0.3.2rc3"
+version = "0.3.2"
 authors = [{ name = "Meta Llama", email = "llama-oss@meta.com" }]
 description = "Llama Stack"
 readme = "README.md"
@@ -30,7 +30,7 @@ dependencies = [
 "httpx",
 "jinja2>=3.1.6",
 "jsonschema",
-"llama-stack-client>=0.3.2rc3",
+"llama-stack-client>=0.3.2",
 "openai>=1.107", # for expires_after support
 "prompt-toolkit",
 "python-dotenv",
@@ -55,7 +55,7 @@ dependencies = [
 ui = [
 "streamlit",
 "pandas",
-"llama-stack-client>=0.3.2rc3",
+"llama-stack-client>=0.3.2",
 "streamlit-option-menu",
 ]
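
As a quick sanity check (not part of the commit), here is a minimal Python sketch of how one might confirm that an installed environment picked up the bumped packages; the distribution names are taken from the diff above, everything else is illustrative:

from importlib.metadata import PackageNotFoundError, version

# Both distributions should report 0.3.2 (or newer, for the client pin
# "llama-stack-client>=0.3.2") once this release is installed.
for dist in ("llama_stack", "llama-stack-client"):
    try:
        print(f"{dist}: {version(dist)}")
    except PackageNotFoundError:
        print(f"{dist}: not installed")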