feat: rebase and implement file API methods

Signed-off-by: Varsha Prasad Narsing <varshaprasad96@gmail.com>
Varsha Prasad Narsing 2025-06-25 16:59:29 -07:00
parent 918e68548f
commit dfafa5bbae
15 changed files with 212 additions and 214 deletions

View file

@@ -5,20 +5,28 @@
 # the root directory of this source tree.
 
-from typing import Any, Literal
+from typing import Any
 
 from pydantic import BaseModel
 
+from llama_stack.providers.utils.kvstore.config import (
+    KVStoreConfig,
+    SqliteKVStoreConfig,
+)
 from llama_stack.schema_utils import json_schema_type
 
 
 @json_schema_type
 class QdrantVectorIOConfig(BaseModel):
     path: str
-    distance_metric: Literal["COSINE", "DOT", "EUCLID", "MANHATTAN"] = "COSINE"
+    kvstore: KVStoreConfig
 
     @classmethod
     def sample_run_config(cls, __distro_dir__: str) -> dict[str, Any]:
         return {
             "path": "${env.QDRANT_PATH:=~/.llama/" + __distro_dir__ + "}/" + "qdrant.db",
+            "kvstore": SqliteKVStoreConfig.sample_run_config(
+                __distro_dir__=__distro_dir__,
+                db_name="qdrant_registry.db",
+            ),
         }
 
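
For orientation (not part of the commit), a minimal sketch of how the reworked config could be exercised. Only sample_run_config, path, and kvstore come from the hunk above; the import path for QdrantVectorIOConfig and the "starter" distro directory are assumptions.

# Sketch only: the import path is an assumption, since this view does not show
# which file the config class lives in.
from llama_stack.providers.inline.vector_io.qdrant.config import QdrantVectorIOConfig

# "starter" is a placeholder distro directory name.
sample = QdrantVectorIOConfig.sample_run_config(__distro_dir__="starter")

# Per the diff, the sample now carries a SQLite-backed kvstore section in
# addition to the Qdrant path.
print(sample["path"])     # ${env.QDRANT_PATH:=~/.llama/starter}/qdrant.db
print(sample["kvstore"])  # result of SqliteKVStoreConfig.sample_run_config(..., db_name="qdrant_registry.db")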

View file

@@ -192,7 +192,9 @@ class SQLiteVecIndex(EmbeddingIndex):
         await asyncio.to_thread(_drop_tables)
 
-    async def add_chunks(self, chunks: list[Chunk], embeddings: NDArray, batch_size: int = 500):
+    async def add_chunks(
+        self, chunks: list[Chunk], embeddings: NDArray, metadata: dict[str, Any] | None = None, batch_size: int = 500
+    ):
         """
         Add new chunks along with their embeddings using batch inserts.
         For each chunk, we insert its JSON into the metadata table and then insert its
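
To make the new call shape concrete, here is a hedged sketch of a caller passing the added metadata argument. The Chunk import and constructor fields are assumptions (this hunk does not show them); the new keyword defaults to None, so callers that omit it keep the previous behavior.

import numpy as np

# Assumed import: the hunk does not show where Chunk is defined.
from llama_stack.apis.vector_io import Chunk


async def index_sample(index) -> None:
    # index is assumed to be an initialized SQLiteVecIndex.
    chunks = [
        Chunk(content="first passage", metadata={"document_id": "doc-1"}),
        Chunk(content="second passage", metadata={"document_id": "doc-1"}),
    ]
    # The embedding dimension (384) is a placeholder; it must match the index.
    embeddings = np.random.rand(len(chunks), 384).astype(np.float32)

    # New optional keyword introduced by this change; what the index stores for
    # it is not shown in this hunk.
    await index.add_chunks(chunks, embeddings, metadata={"source": "example"})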