Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-08-07 02:58:21 +00:00)
renaming VecImpl -> VectorIO and SQLiteVecVectorIOImpl -> SQLiteVecVectorIOAdapter
Signed-off-by: Francisco Javier Arceo <farceo@redhat.com>
commit 947b811022 (parent f388778950)
6 changed files with 12 additions and 12 deletions
@@ -6,13 +6,13 @@
 
 from typing import Dict
 
 from llama_stack.providers.datatypes import Api, ProviderSpec
 
-from .config import SQLiteVecImplConfig
+from .config import SQLiteVectorIOConfig
 
 
-async def get_provider_impl(config: SQLiteVecImplConfig, deps: Dict[Api, ProviderSpec]):
-    from .sqlite_vec import SQLiteVecVectorIOImpl
+async def get_provider_impl(config: SQLiteVectorIOConfig, deps: Dict[Api, ProviderSpec]):
+    from .sqlite_vec import SQLiteVecVectorIOAdapter
 
-    assert isinstance(config, SQLiteVecImplConfig), f"Unexpected config type: {type(config)}"
-    impl = SQLiteVecVectorIOImpl(config, deps[Api.inference])
+    assert isinstance(config, SQLiteVectorIOConfig), f"Unexpected config type: {type(config)}"
+    impl = SQLiteVecVectorIOAdapter(config, deps[Api.inference])
     await impl.initialize()
     return impl
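For orientation, a minimal sketch of how the renamed entry point could be wired up. This is not code from the repository: the db paths are placeholders, and the inference dependency is left as a parameter because constructing one is outside this diff.

# Hedged sketch only: paths are illustrative, and `inference_provider` must be
# a concrete Api.inference implementation supplied by the caller.
from llama_stack.providers.datatypes import Api
from llama_stack.providers.inline.vector_io.sqlite_vec import (
    SQLiteVectorIOConfig,
    get_provider_impl,
)
from llama_stack.providers.utils.kvstore.config import SqliteKVStoreConfig


async def build_sqlite_vec_provider(inference_provider):
    config = SQLiteVectorIOConfig(
        db_path="/tmp/sqlite_vec.db",  # placeholder path
        kvstore=SqliteKVStoreConfig(db_path="/tmp/sqlite_vec_registry.db").model_dump(),
    )
    # get_provider_impl instantiates SQLiteVecVectorIOAdapter and awaits initialize().
    return await get_provider_impl(config, {Api.inference: inference_provider})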
@@ -14,7 +14,7 @@ from llama_stack.providers.utils.kvstore.config import (
 )
 
 
-class SQLiteVecImplConfig(BaseModel):
+class SQLiteVectorIOConfig(BaseModel):
     db_path: str
     kvstore: KVStoreConfig
 
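As a quick check of the renamed config class, a hedged sketch of constructing it directly; both paths are illustrative values, not project defaults.

from llama_stack.providers.inline.vector_io.sqlite_vec.config import SQLiteVectorIOConfig
from llama_stack.providers.utils.kvstore.config import SqliteKVStoreConfig

# Illustrative paths only; any writable location works.
config = SQLiteVectorIOConfig(
    db_path="/tmp/sqlite_vec.db",
    kvstore=SqliteKVStoreConfig(db_path="/tmp/sqlite_vec_registry.db"),
)

# Run configs carry this as plain data, so dump it and validate it back.
as_dict = config.model_dump()
restored = SQLiteVectorIOConfig.model_validate(as_dict)
print(restored.db_path, type(restored.kvstore).__name__)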
@@ -118,7 +118,7 @@ class SQLiteVecIndex(EmbeddingIndex):
         return QueryChunksResponse(chunks=chunks, scores=scores)
 
 
-class SQLiteVecVectorIOImpl(VectorIO, VectorDBsProtocolPrivate):
+class SQLiteVecVectorIOAdapter(VectorIO, VectorDBsProtocolPrivate):
     """
     A VectorIO implementation using SQLite + sqlite_vec.
     This class handles vector database registration (with metadata stored in a table named `vector_dbs`)
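The docstring's "SQLite + sqlite_vec" refers to the sqlite-vec loadable extension. Independent of llama-stack, a minimal sketch of that building block; the table name, dimensions, and vectors are made up for illustration.

import sqlite3

import sqlite_vec  # pip install sqlite-vec
from sqlite_vec import serialize_float32

db = sqlite3.connect(":memory:")
db.enable_load_extension(True)
sqlite_vec.load(db)
db.enable_load_extension(False)

# A tiny vec0 virtual table with 4-dimensional embeddings (toy data).
db.execute("CREATE VIRTUAL TABLE vec_items USING vec0(embedding float[4])")
items = [(1, [0.1, 0.1, 0.1, 0.1]), (2, [0.9, 0.9, 0.9, 0.9])]
for rowid, vec in items:
    db.execute(
        "INSERT INTO vec_items(rowid, embedding) VALUES (?, ?)",
        (rowid, serialize_float32(vec)),
    )

# Nearest-neighbour query, ordered by distance.
rows = db.execute(
    "SELECT rowid, distance FROM vec_items WHERE embedding MATCH ? ORDER BY distance LIMIT 1",
    (serialize_float32([0.2, 0.2, 0.2, 0.2]),),
).fetchall()
print(rows)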
@@ -59,7 +59,7 @@ def available_providers() -> List[ProviderSpec]:
             provider_type="inline::sqlite_vec",
             pip_packages=EMBEDDING_DEPS + ["sqlite-vec"],
             module="llama_stack.providers.inline.vector_io.sqlite_vec",
-            config_class="llama_stack.providers.inline.vector_io.sqlite_vec.SQLiteVecImplConfig",
+            config_class="llama_stack.providers.inline.vector_io.sqlite_vec.SQLiteVectorIOConfig",
             api_dependencies=[Api.inference],
         ),
         remote_provider_spec(
@@ -15,7 +15,7 @@ from llama_stack.distribution.datatypes import Api, Provider
 
 from llama_stack.providers.inline.vector_io.chroma import ChromaInlineImplConfig
 from llama_stack.providers.inline.vector_io.faiss import FaissImplConfig
-from llama_stack.providers.inline.vector_io.sqlite_vec import SQLiteVecImplConfig
+from llama_stack.providers.inline.vector_io.sqlite_vec import SQLiteVectorIOConfig
 from llama_stack.providers.remote.vector_io.chroma import ChromaRemoteImplConfig
 from llama_stack.providers.remote.vector_io.pgvector import PGVectorConfig
 from llama_stack.providers.remote.vector_io.weaviate import WeaviateConfig
@@ -62,7 +62,7 @@ def vector_io_sqlite_vec() -> ProviderFixture:
            Provider(
                provider_id="sqlite_vec",
                provider_type="inline::sqlite_vec",
-                config=SQLiteVecImplConfig(
+                config=SQLiteVectorIOConfig(
                    kvstore=SqliteKVStoreConfig(db_path=temp_file.name).model_dump(),
                ).model_dump(),
            )
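Outside of the test fixture, the same provider entry can be built directly. A hedged sketch follows; the temporary file here stands in for the fixture's temp_file, and the explicit db_path is an assumption about how the config is filled in.

import tempfile

from llama_stack.distribution.datatypes import Provider
from llama_stack.providers.inline.vector_io.sqlite_vec import SQLiteVectorIOConfig
from llama_stack.providers.utils.kvstore.config import SqliteKVStoreConfig

# Stand-in for the fixture's temp_file; any writable path would do.
temp_file = tempfile.NamedTemporaryFile(suffix=".db", delete=False)

provider = Provider(
    provider_id="sqlite_vec",
    provider_type="inline::sqlite_vec",
    config=SQLiteVectorIOConfig(
        db_path=temp_file.name,
        kvstore=SqliteKVStoreConfig(db_path=temp_file.name).model_dump(),
    ).model_dump(),
)
print(provider.provider_id, provider.provider_type)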
@@ -17,7 +17,7 @@ from llama_stack.providers.inline.inference.sentence_transformers import (
     SentenceTransformersInferenceConfig,
 )
 from llama_stack.providers.inline.vector_io.faiss.config import FaissImplConfig
-from llama_stack.providers.inline.vector_io.sqlite_vec.config import SQLiteVecImplConfig
+from llama_stack.providers.inline.vector_io.sqlite_vec.config import SQLiteVectorIOConfig
 from llama_stack.providers.remote.inference.ollama import OllamaImplConfig
 from llama_stack.templates.template import DistributionTemplate, RunConfigSettings
 
@@ -58,7 +58,7 @@ def get_distribution_template() -> DistributionTemplate:
     vector_io_provider_sqlite = Provider(
         provider_id="sqlite_vec",
         provider_type="inline::sqlite_vec",
-        config=SQLiteVecImplConfig.sample_run_config(f"distributions/{name}"),
+        config=SQLiteVectorIOConfig.sample_run_config(f"distributions/{name}"),
     )
 
     inference_model = ModelInput(
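For reference, the template's call can be reproduced on its own. A hedged sketch: the distribution name below is only an assumed example, since the template's `name` variable is defined outside this hunk.

from llama_stack.providers.inline.vector_io.sqlite_vec.config import SQLiteVectorIOConfig

# "ollama" is an assumed example value for the template's `name` variable.
sample = SQLiteVectorIOConfig.sample_run_config("distributions/ollama")
print(sample)  # the data placed under this provider's `config` in the generated run config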