Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-12-03 01:48:05 +00:00)

build: Bump version to 0.3.3

commit 2424a3d3b2 (parent ff6d8d5a50)
3 changed files with 8 additions and 8 deletions

llama_stack/ui/package-lock.json (generated, 8 lines changed)

@@ -18,7 +18,7 @@
         "class-variance-authority": "^0.7.1",
         "clsx": "^2.1.1",
         "framer-motion": "^12.23.24",
-        "llama-stack-client": "^0.3.2",
+        "llama-stack-client": "^0.3.3",
         "lucide-react": "^0.545.0",
         "next": "15.5.4",
         "next-auth": "^4.24.11",
@@ -9635,9 +9635,9 @@
       "license": "MIT"
     },
     "node_modules/llama-stack-client": {
-      "version": "0.3.2",
-      "resolved": "https://registry.npmjs.org/llama-stack-client/-/llama-stack-client-0.3.2.tgz",
-      "integrity": "sha512-vzcnIN6k3sp7dhMXSnyrPSd82ACH/H3snj2uF6DgZwZCacKQNp2Y5XIT5qZZgoM1EUXbaxdVYFCeWD9yNCwatw==",
+      "version": "0.3.3",
+      "resolved": "https://registry.npmjs.org/llama-stack-client/-/llama-stack-client-0.3.3.tgz",
+      "integrity": "sha512-eESw7ousfilyD3vQZrSVe51cuFn32VHXuFlrsVtopwLdjW6AOzuzLlfJLO2jroRqTGGI/vqaykqDvZSyxJHr9A==",
       "license": "MIT",
       "dependencies": {
         "@types/node": "^18.11.18",

llama_stack/ui/package.json (2 lines changed)

@@ -23,7 +23,7 @@
     "class-variance-authority": "^0.7.1",
     "clsx": "^2.1.1",
     "framer-motion": "^12.23.24",
-    "llama-stack-client": "^0.3.2",
+    "llama-stack-client": "^0.3.3",
     "lucide-react": "^0.545.0",
     "next": "15.5.4",
     "next-auth": "^4.24.11",
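
For context, the declared range in package.json and the resolved entry in package-lock.json have to move together (here from 0.3.2 to 0.3.3), which is why both ui files change in this commit. Below is a minimal Python sketch of a consistency check; the script name, the repo-relative paths, and the lockfile layout (npm lockfileVersion 2/3) are illustrative assumptions, not part of this commit:

# check_ui_client_version.py  (hypothetical helper, not part of this commit)
# Verifies that llama_stack/ui/package.json and its lockfile agree on the
# llama-stack-client version after a bump like the one above.
import json
from pathlib import Path

UI_DIR = Path("llama_stack/ui")  # assumed repo-relative location


def client_versions() -> tuple[str, str]:
    """Return (declared_range, locked_version) for llama-stack-client."""
    pkg = json.loads((UI_DIR / "package.json").read_text())
    lock = json.loads((UI_DIR / "package-lock.json").read_text())
    declared = pkg["dependencies"]["llama-stack-client"]                     # e.g. "^0.3.3"
    # lockfileVersion 2/3 layout assumed: entries live under "packages".
    locked = lock["packages"]["node_modules/llama-stack-client"]["version"]  # e.g. "0.3.3"
    return declared, locked


if __name__ == "__main__":
    declared, locked = client_versions()
    # A caret range like "^0.3.3" should use the locked version as its base.
    assert declared.lstrip("^~") == locked, f"package.json ({declared}) != lockfile ({locked})"
    print(f"ui llama-stack-client pinned consistently at {locked}")

In practice the lockfile entry (version, resolved URL, integrity hash) is regenerated by the package manager rather than edited by hand; the sketch above is only a post-bump sanity check.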

pyproject.toml (6 lines changed)

@@ -7,7 +7,7 @@ required-version = ">=0.7.0"
 
 [project]
 name = "llama_stack"
-version = "0.3.3rc1"
+version = "0.3.3"
 authors = [{ name = "Meta Llama", email = "llama-oss@meta.com" }]
 description = "Llama Stack"
 readme = "README.md"
@@ -30,7 +30,7 @@ dependencies = [
     "httpx",
     "jinja2>=3.1.6",
     "jsonschema",
-    "llama-stack-client>=0.3.3rc1",
+    "llama-stack-client>=0.3.3",
     "openai>=1.107", # for expires_after support
     "prompt-toolkit",
     "python-dotenv",
@@ -55,7 +55,7 @@ dependencies = [
 ui = [
     "streamlit",
     "pandas",
-    "llama-stack-client>=0.3.3rc1",
+    "llama-stack-client>=0.3.3",
     "streamlit-option-menu",
 ]
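
On the Python side the release drops the rc suffix in two places: the package's own [project] version and the minimum llama-stack-client pin. A small post-release sketch follows; it assumes both distributions and the packaging library are installed in the current environment, and the script name is illustrative:

# verify_release_pins.py  (hypothetical post-release check, not part of this commit)
# Confirms the installed distributions satisfy the versions pinned above.
from importlib.metadata import version

from packaging.version import Version  # assumed available in the environment

EXPECTED_STACK = Version("0.3.3")  # [project] version after the bump
MIN_CLIENT = Version("0.3.3")      # floor from "llama-stack-client>=0.3.3"

if __name__ == "__main__":
    stack = Version(version("llama_stack"))
    client = Version(version("llama-stack-client"))

    # Pre-releases such as 0.3.3rc1 compare below the final 0.3.3, so a client
    # still on the rc would fail the tightened ">=0.3.3" floor checked here.
    assert stack == EXPECTED_STACK, f"llama_stack is {stack}, expected {EXPECTED_STACK}"
    assert client >= MIN_CLIENT, f"llama-stack-client {client} does not satisfy >= {MIN_CLIENT}"
    print(f"llama_stack {stack} with llama-stack-client {client}: release pins satisfied")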