Merge upstream/main and resolve conflicts

Resolved merge conflicts in:
- Documentation files: updated vector IO provider docs to include both kvstore fields and embedding model configuration
- Config files: merged kvstore requirements from upstream with embedding model fields
- Dependencies: updated to latest client versions while preserving llama-models dependency
- Regenerated lockfiles to ensure consistency

All embedding model configuration features preserved while incorporating upstream changes.
This commit is contained in:
skamenan7 2025-07-16 19:57:02 -04:00
commit 6634b21a76
92 changed files with 3069 additions and 2481 deletions

View file

@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
[project]
name = "llama_stack"
-version = "0.2.14"
+version = "0.2.15"
authors = [{ name = "Meta Llama", email = "llama-oss@meta.com" }]
description = "Llama Stack"
readme = "README.md"
@@ -28,8 +28,9 @@ dependencies = [
"huggingface-hub>=0.30.0,<1.0",
"jinja2>=3.1.6",
"jsonschema",
+"llama-api-client>=0.1.2",
"llama-models", # canonical source for model implementations
-"llama-stack-client>=0.2.14",
+"llama-stack-client>=0.2.15",
"openai>=1.66",
"prompt-toolkit",
"python-dotenv",
@@ -53,7 +54,7 @@ dependencies = [
ui = [
"streamlit",
"pandas",
-"llama-stack-client>=0.2.14",
+"llama-stack-client>=0.2.15",
"streamlit-option-menu",
]
@@ -126,6 +127,7 @@ docs = [
"sphinxcontrib.redoc",
"sphinxcontrib.video",
"sphinxcontrib.mermaid",
+"sphinx-reredirects",
"tomli",
"linkify",
"sphinxcontrib.openapi",