fix: Update VectorIO config classes in registry

Signed-off-by: Yuan Tang <terrytangyuan@gmail.com>
This commit is contained in:
Yuan Tang 2025-02-13 13:27:17 -05:00
parent 8ff27b58fa
commit 6eb65bbff5
No known key found for this signature in database
2 changed files with 9 additions and 9 deletions

View file

@@ -42,7 +42,7 @@ def available_providers() -> List[ProviderSpec]:
             provider_type="inline::meta-reference",
             pip_packages=EMBEDDING_DEPS + ["faiss-cpu"],
             module="llama_stack.providers.inline.vector_io.faiss",
-            config_class="llama_stack.providers.inline.vector_io.faiss.FaissImplConfig",
+            config_class="llama_stack.providers.inline.vector_io.faiss.FaissVectorIOConfig",
             deprecation_warning="Please use the `inline::faiss` provider instead.",
             api_dependencies=[Api.inference],
         ),
@@ -51,7 +51,7 @@ def available_providers() -> List[ProviderSpec]:
             provider_type="inline::faiss",
             pip_packages=EMBEDDING_DEPS + ["faiss-cpu"],
             module="llama_stack.providers.inline.vector_io.faiss",
-            config_class="llama_stack.providers.inline.vector_io.faiss.FaissImplConfig",
+            config_class="llama_stack.providers.inline.vector_io.faiss.FaissVectorIOConfig",
             api_dependencies=[Api.inference],
         ),
         InlineProviderSpec(
@@ -68,7 +68,7 @@ def available_providers() -> List[ProviderSpec]:
                 adapter_type="chromadb",
                 pip_packages=EMBEDDING_DEPS + ["chromadb-client"],
                 module="llama_stack.providers.remote.vector_io.chroma",
-                config_class="llama_stack.providers.remote.vector_io.chroma.ChromaRemoteImplConfig",
+                config_class="llama_stack.providers.remote.vector_io.chroma.ChromaVectorIOConfig",
             ),
             api_dependencies=[Api.inference],
         ),
@@ -77,7 +77,7 @@ def available_providers() -> List[ProviderSpec]:
             provider_type="inline::chromadb",
             pip_packages=EMBEDDING_DEPS + ["chromadb"],
             module="llama_stack.providers.inline.vector_io.chroma",
-            config_class="llama_stack.providers.inline.vector_io.chroma.ChromaInlineImplConfig",
+            config_class="llama_stack.providers.inline.vector_io.chroma.ChromaVectorIOConfig",
             api_dependencies=[Api.inference],
         ),
         remote_provider_spec(
@@ -86,7 +86,7 @@ def available_providers() -> List[ProviderSpec]:
                 adapter_type="pgvector",
                 pip_packages=EMBEDDING_DEPS + ["psycopg2-binary"],
                 module="llama_stack.providers.remote.vector_io.pgvector",
-                config_class="llama_stack.providers.remote.vector_io.pgvector.PGVectorConfig",
+                config_class="llama_stack.providers.remote.vector_io.pgvector.PGVectorVectorIOConfig",
             ),
             api_dependencies=[Api.inference],
         ),
@@ -96,7 +96,7 @@ def available_providers() -> List[ProviderSpec]:
                 adapter_type="weaviate",
                 pip_packages=EMBEDDING_DEPS + ["weaviate-client"],
                 module="llama_stack.providers.remote.vector_io.weaviate",
-                config_class="llama_stack.providers.remote.vector_io.weaviate.WeaviateConfig",
+                config_class="llama_stack.providers.remote.vector_io.weaviate.WeaviateVectorIOConfig",
                 provider_data_validator="llama_stack.providers.remote.vector_io.weaviate.WeaviateRequestProviderData",
             ),
             api_dependencies=[Api.inference],
@@ -107,7 +107,7 @@ def available_providers() -> List[ProviderSpec]:
                 adapter_type="sample",
                 pip_packages=[],
                 module="llama_stack.providers.remote.vector_io.sample",
-                config_class="llama_stack.providers.remote.vector_io.sample.SampleConfig",
+                config_class="llama_stack.providers.remote.vector_io.sample.SampleVectorIOConfig",
             ),
             api_dependencies=[],
         ),
@@ -117,7 +117,7 @@ def available_providers() -> List[ProviderSpec]:
                 adapter_type="qdrant",
                 pip_packages=EMBEDDING_DEPS + ["qdrant-client"],
                 module="llama_stack.providers.remote.vector_io.qdrant",
-                config_class="llama_stack.providers.remote.vector_io.qdrant.QdrantConfig",
+                config_class="llama_stack.providers.remote.vector_io.qdrant.QdrantVectorIOConfig",
             ),
             api_dependencies=[Api.inference],
         ),

View file

@@ -88,7 +88,7 @@ class ChromaIndex(EmbeddingIndex):
 class ChromaVectorIOAdapter(VectorIO, VectorDBsProtocolPrivate):
     def __init__(
         self,
-        config: Union[ChromaVectorIOConfig, ChromaVectorIOConfig],
+        config: ChromaVectorIOConfig,
         inference_api: Api.inference,
     ) -> None:
         log.info(f"Initializing ChromaVectorIOAdapter with url: {config}")