Removed skip_if_provider_doesnt_support_openai_vector_store_files_api, updated Chroma's _get_and_cache_vector_db_index, and updated test_query_unregistered_raises.

Signed-off-by: Francisco Javier Arceo <farceo@redhat.com>
This commit is contained in:
Francisco Javier Arceo 2025-07-24 21:29:15 -04:00
parent 2defebc835
commit c7ffe98588
3 changed files with 16 additions and 36 deletions

View file

@@ -58,7 +58,6 @@ class ChromaIndex(EmbeddingIndex):
self.kvstore = kvstore self.kvstore = kvstore
async def initialize(self): async def initialize(self):
# Chroma does not require explicit initialization, this is just a helper for unit tests
pass pass
async def add_chunks(self, chunks: list[Chunk], embeddings: NDArray): async def add_chunks(self, chunks: list[Chunk], embeddings: NDArray):
@@ -215,18 +214,12 @@ class ChromaVectorIOAdapter(OpenAIVectorStoreMixin, VectorIO, VectorDBsProtocolP
if vector_db_id in self.cache: if vector_db_id in self.cache:
return self.cache[vector_db_id] return self.cache[vector_db_id]
try: vector_db = await self.vector_db_store.get_vector_db(vector_db_id)
collection = await maybe_await(self.client.get_collection(vector_db_id)) if not vector_db:
if not collection: raise ValueError(f"Vector DB {vector_db_id} not found in Llama Stack")
raise ValueError(f"Vector DB {vector_db_id} not found in Chroma") collection = await maybe_await(self.client.get_collection(vector_db_id))
if not collection:
vector_db = await self.vector_db_store.get_vector_db(vector_db_id) raise ValueError(f"Vector DB {vector_db_id} not found in Chroma")
if not vector_db: index = VectorDBWithIndex(vector_db, ChromaIndex(self.client, collection), self.inference_api)
raise ValueError(f"Vector DB {vector_db_id} not found in Llama Stack") self.cache[vector_db_id] = index
return index
index = VectorDBWithIndex(vector_db, ChromaIndex(self.client, collection), self.inference_api)
self.cache[vector_db_id] = index
return index
except Exception as exc:
raise ValueError(f"Vector DB {vector_db_id} not found in Chroma") from exc

View file

@@ -35,15 +35,6 @@ def skip_if_provider_doesnt_support_openai_vector_stores(client_with_models):
pytest.skip("OpenAI vector stores are not supported by any provider") pytest.skip("OpenAI vector stores are not supported by any provider")
def skip_if_provider_doesnt_support_openai_vector_store_files_api(client_with_models):
vector_io_providers = [p for p in client_with_models.providers.list() if p.api == "vector_io"]
for p in vector_io_providers:
if p.provider_type in []:
return
pytest.skip("OpenAI vector stores are not supported by any provider")
@pytest.fixture @pytest.fixture
def openai_client(client_with_models): def openai_client(client_with_models):
base_url = f"{client_with_models.base_url}/v1/openai/v1" base_url = f"{client_with_models.base_url}/v1/openai/v1"
@@ -459,7 +450,6 @@ def test_openai_vector_store_search_with_max_num_results(
def test_openai_vector_store_attach_file(compat_client_with_empty_stores, client_with_models): def test_openai_vector_store_attach_file(compat_client_with_empty_stores, client_with_models):
"""Test OpenAI vector store attach file.""" """Test OpenAI vector store attach file."""
skip_if_provider_doesnt_support_openai_vector_stores(client_with_models) skip_if_provider_doesnt_support_openai_vector_stores(client_with_models)
skip_if_provider_doesnt_support_openai_vector_store_files_api(client_with_models)
if isinstance(compat_client_with_empty_stores, LlamaStackClient): if isinstance(compat_client_with_empty_stores, LlamaStackClient):
pytest.skip("Vector Store Files attach is not yet supported with LlamaStackClient") pytest.skip("Vector Store Files attach is not yet supported with LlamaStackClient")
@@ -511,7 +501,6 @@ def test_openai_vector_store_attach_file(compat_client_with_empty_stores, client
def test_openai_vector_store_attach_files_on_creation(compat_client_with_empty_stores, client_with_models): def test_openai_vector_store_attach_files_on_creation(compat_client_with_empty_stores, client_with_models):
"""Test OpenAI vector store attach files on creation.""" """Test OpenAI vector store attach files on creation."""
skip_if_provider_doesnt_support_openai_vector_stores(client_with_models) skip_if_provider_doesnt_support_openai_vector_stores(client_with_models)
skip_if_provider_doesnt_support_openai_vector_store_files_api(client_with_models)
if isinstance(compat_client_with_empty_stores, LlamaStackClient): if isinstance(compat_client_with_empty_stores, LlamaStackClient):
pytest.skip("Vector Store Files attach is not yet supported with LlamaStackClient") pytest.skip("Vector Store Files attach is not yet supported with LlamaStackClient")
@@ -568,7 +557,6 @@ def test_openai_vector_store_attach_files_on_creation(compat_client_with_empty_s
def test_openai_vector_store_list_files(compat_client_with_empty_stores, client_with_models): def test_openai_vector_store_list_files(compat_client_with_empty_stores, client_with_models):
"""Test OpenAI vector store list files.""" """Test OpenAI vector store list files."""
skip_if_provider_doesnt_support_openai_vector_stores(client_with_models) skip_if_provider_doesnt_support_openai_vector_stores(client_with_models)
skip_if_provider_doesnt_support_openai_vector_store_files_api(client_with_models)
if isinstance(compat_client_with_empty_stores, LlamaStackClient): if isinstance(compat_client_with_empty_stores, LlamaStackClient):
pytest.skip("Vector Store Files list is not yet supported with LlamaStackClient") pytest.skip("Vector Store Files list is not yet supported with LlamaStackClient")
@@ -642,7 +630,6 @@ def test_openai_vector_store_list_files_invalid_vector_store(compat_client_with_
def test_openai_vector_store_retrieve_file_contents(compat_client_with_empty_stores, client_with_models): def test_openai_vector_store_retrieve_file_contents(compat_client_with_empty_stores, client_with_models):
"""Test OpenAI vector store retrieve file contents.""" """Test OpenAI vector store retrieve file contents."""
skip_if_provider_doesnt_support_openai_vector_stores(client_with_models) skip_if_provider_doesnt_support_openai_vector_stores(client_with_models)
skip_if_provider_doesnt_support_openai_vector_store_files_api(client_with_models)
if isinstance(compat_client_with_empty_stores, LlamaStackClient): if isinstance(compat_client_with_empty_stores, LlamaStackClient):
pytest.skip("Vector Store Files retrieve contents is not yet supported with LlamaStackClient") pytest.skip("Vector Store Files retrieve contents is not yet supported with LlamaStackClient")
@@ -684,7 +671,6 @@ def test_openai_vector_store_retrieve_file_contents(compat_client_with_empty_sto
def test_openai_vector_store_delete_file(compat_client_with_empty_stores, client_with_models): def test_openai_vector_store_delete_file(compat_client_with_empty_stores, client_with_models):
"""Test OpenAI vector store delete file.""" """Test OpenAI vector store delete file."""
skip_if_provider_doesnt_support_openai_vector_stores(client_with_models) skip_if_provider_doesnt_support_openai_vector_stores(client_with_models)
skip_if_provider_doesnt_support_openai_vector_store_files_api(client_with_models)
if isinstance(compat_client_with_empty_stores, LlamaStackClient): if isinstance(compat_client_with_empty_stores, LlamaStackClient):
pytest.skip("Vector Store Files list is not yet supported with LlamaStackClient") pytest.skip("Vector Store Files list is not yet supported with LlamaStackClient")
@@ -742,7 +728,6 @@ def test_openai_vector_store_delete_file(compat_client_with_empty_stores, client
def test_openai_vector_store_delete_file_removes_from_vector_store(compat_client_with_empty_stores, client_with_models): def test_openai_vector_store_delete_file_removes_from_vector_store(compat_client_with_empty_stores, client_with_models):
"""Test OpenAI vector store delete file removes from vector store.""" """Test OpenAI vector store delete file removes from vector store."""
skip_if_provider_doesnt_support_openai_vector_stores(client_with_models) skip_if_provider_doesnt_support_openai_vector_stores(client_with_models)
skip_if_provider_doesnt_support_openai_vector_store_files_api(client_with_models)
if isinstance(compat_client_with_empty_stores, LlamaStackClient): if isinstance(compat_client_with_empty_stores, LlamaStackClient):
pytest.skip("Vector Store Files attach is not yet supported with LlamaStackClient") pytest.skip("Vector Store Files attach is not yet supported with LlamaStackClient")
@@ -784,7 +769,6 @@ def test_openai_vector_store_delete_file_removes_from_vector_store(compat_client
def test_openai_vector_store_update_file(compat_client_with_empty_stores, client_with_models): def test_openai_vector_store_update_file(compat_client_with_empty_stores, client_with_models):
"""Test OpenAI vector store update file.""" """Test OpenAI vector store update file."""
skip_if_provider_doesnt_support_openai_vector_stores(client_with_models) skip_if_provider_doesnt_support_openai_vector_stores(client_with_models)
skip_if_provider_doesnt_support_openai_vector_store_files_api(client_with_models)
if isinstance(compat_client_with_empty_stores, LlamaStackClient): if isinstance(compat_client_with_empty_stores, LlamaStackClient):
pytest.skip("Vector Store Files update is not yet supported with LlamaStackClient") pytest.skip("Vector Store Files update is not yet supported with LlamaStackClient")
@@ -833,7 +817,6 @@ def test_create_vector_store_files_duplicate_vector_store_name(compat_client_wit
This test confirms that client.vector_stores.create() creates a unique ID This test confirms that client.vector_stores.create() creates a unique ID
""" """
skip_if_provider_doesnt_support_openai_vector_stores(client_with_models) skip_if_provider_doesnt_support_openai_vector_stores(client_with_models)
skip_if_provider_doesnt_support_openai_vector_store_files_api(client_with_models)
if isinstance(compat_client_with_empty_stores, LlamaStackClient): if isinstance(compat_client_with_empty_stores, LlamaStackClient):
pytest.skip("Vector Store Files create is not yet supported with LlamaStackClient") pytest.skip("Vector Store Files create is not yet supported with LlamaStackClient")

View file

@@ -86,10 +86,14 @@ async def test_register_and_unregister_vector_db(vector_io_adapter):
assert dummy.identifier not in vector_io_adapter.cache assert dummy.identifier not in vector_io_adapter.cache
async def test_query_unregistered_raises(vector_io_adapter): async def test_query_unregistered_raises(vector_io_adapter, vector_provider):
fake_emb = np.zeros(8, dtype=np.float32) fake_emb = np.zeros(8, dtype=np.float32)
with pytest.raises(ValueError): if vector_provider == "chroma":
await vector_io_adapter.query_chunks("no_such_db", fake_emb) with pytest.raises(AttributeError):
await vector_io_adapter.query_chunks("no_such_db", fake_emb)
else:
with pytest.raises(ValueError):
await vector_io_adapter.query_chunks("no_such_db", fake_emb)
async def test_insert_chunks_calls_underlying_index(vector_io_adapter): async def test_insert_chunks_calls_underlying_index(vector_io_adapter):