Mirror of https://github.com/meta-llama/llama-stack.git, synced 2025-07-17 10:28:11 +00:00
feat: Add missing Vector Store Files API surface
This adds the ability to list, retrieve, update, and delete Vector Store Files. It implements these new APIs for the faiss and sqlite-vec providers, since those are the two that also have the rest of the vector store files implementation. A number of new integration tests were added, and I ran them for each provider as outlined below.

faiss (from ollama distro):

```
INFERENCE_MODEL="meta-llama/Llama-3.2-3B-Instruct" \
llama stack run llama_stack/templates/ollama/run.yaml

LLAMA_STACK_CONFIG=http://localhost:8321 \
pytest -sv tests/integration/vector_io/test_openai_vector_stores.py \
  --embedding-model=all-MiniLM-L6-v2
```

sqlite-vec (from starter distro):

```
llama stack run llama_stack/templates/starter/run.yaml

LLAMA_STACK_CONFIG=http://localhost:8321 \
pytest -sv tests/integration/vector_io/test_openai_vector_stores.py \
  --embedding-model=all-MiniLM-L6-v2
```

Signed-off-by: Ben Browning <bbrownin@redhat.com>
This commit is contained in:
parent c20388c424
commit 459d50a365

11 changed files with 1248 additions and 20 deletions
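For context, here is a minimal sketch of what the newly added file operations look like from an OpenAI-compatible client pointed at a running stack. The `base_url` path and `api_key` value are assumptions for a local server started as shown above (adjust for your deployment); the `vector_stores.files` calls mirror the ones exercised by the integration tests in the diff below.

```python
from io import BytesIO

from openai import OpenAI

# Assumed endpoint for a locally running stack's OpenAI-compatible API; the
# exact base_url path and api_key are deployment-specific assumptions.
client = OpenAI(base_url="http://localhost:8321/v1/openai/v1", api_key="none")

# Existing surface: create a vector store, upload a file, and attach it.
vector_store = client.vector_stores.create(name="my_docs")
with BytesIO(b"example contents") as buf:
    buf.name = "example.txt"
    uploaded = client.files.create(file=buf, purpose="assistants")
client.vector_stores.files.create(vector_store_id=vector_store.id, file_id=uploaded.id)

# Surface added by this commit: list, retrieve, update, and delete files.
files = client.vector_stores.files.list(vector_store_id=vector_store.id)
print([f.id for f in files.data])

retrieved = client.vector_stores.files.retrieve(vector_store_id=vector_store.id, file_id=uploaded.id)
print(retrieved.status, retrieved.attributes)

client.vector_stores.files.update(
    vector_store_id=vector_store.id, file_id=uploaded.id, attributes={"topic": "example"}
)
client.vector_stores.files.delete(vector_store_id=vector_store.id, file_id=uploaded.id)
```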
@@ -6,8 +6,11 @@
import logging
import time
from io import BytesIO

import pytest
from llama_stack_client import BadRequestError, LlamaStackClient
from openai import BadRequestError as OpenAIBadRequestError
from openai import OpenAI

from llama_stack.apis.vector_io import Chunk
@@ -73,11 +76,23 @@ def compat_client_with_empty_stores(compat_client):
            logger.warning("Failed to clear vector stores")
            pass

    def clear_files():
        try:
            response = compat_client.files.list()
            for file in response.data:
                compat_client.files.delete(file_id=file.id)
        except Exception:
            # If the API is not available or fails, just continue
            logger.warning("Failed to clear files")
            pass

    clear_vector_stores()
    clear_files()
    yield compat_client

    # Clean up after the test
    clear_vector_stores()
    clear_files()


def test_openai_create_vector_store(compat_client_with_empty_stores, client_with_models):
@@ -423,3 +438,204 @@ def test_openai_vector_store_search_with_max_num_results(

    assert search_response is not None
    assert len(search_response.data) == 2


def test_openai_vector_store_attach_file_response_attributes(compat_client_with_empty_stores, client_with_models):
    """Test OpenAI vector store attach file."""
    skip_if_provider_doesnt_support_openai_vector_stores(client_with_models)

    if isinstance(compat_client_with_empty_stores, LlamaStackClient):
        pytest.skip("Vector Store Files attach is not yet supported with LlamaStackClient")

    compat_client = compat_client_with_empty_stores

    # Create a vector store
    vector_store = compat_client.vector_stores.create(name="test_store")

    # Create a file
    test_content = b"This is a test file"
    with BytesIO(test_content) as file_buffer:
        file_buffer.name = "openai_test.txt"
        file = compat_client.files.create(file=file_buffer, purpose="assistants")

    # Attach the file to the vector store
    file_attach_response = compat_client.vector_stores.files.create(
        vector_store_id=vector_store.id,
        file_id=file.id,
    )

    assert file_attach_response
    assert file_attach_response.object == "vector_store.file"
    assert file_attach_response.id == file.id
    assert file_attach_response.vector_store_id == vector_store.id
    assert file_attach_response.status == "completed"
    assert file_attach_response.chunking_strategy.type == "auto"
    assert file_attach_response.created_at > 0
    assert not file_attach_response.last_error

    updated_vector_store = compat_client.vector_stores.retrieve(vector_store_id=vector_store.id)
    assert updated_vector_store.file_counts.completed == 1
    assert updated_vector_store.file_counts.total == 1
    assert updated_vector_store.file_counts.cancelled == 0
    assert updated_vector_store.file_counts.failed == 0
    assert updated_vector_store.file_counts.in_progress == 0


def test_openai_vector_store_list_files(compat_client_with_empty_stores, client_with_models):
    """Test OpenAI vector store list files."""
    skip_if_provider_doesnt_support_openai_vector_stores(client_with_models)

    if isinstance(compat_client_with_empty_stores, LlamaStackClient):
        pytest.skip("Vector Store Files list is not yet supported with LlamaStackClient")

    compat_client = compat_client_with_empty_stores

    # Create a vector store
    vector_store = compat_client.vector_stores.create(name="test_store")

    # Create some files and attach them to the vector store
    file_ids = []
    for i in range(3):
        with BytesIO(f"This is a test file {i}".encode()) as file_buffer:
            file_buffer.name = f"openai_test_{i}.txt"
            file = compat_client.files.create(file=file_buffer, purpose="assistants")

        compat_client.vector_stores.files.create(
            vector_store_id=vector_store.id,
            file_id=file.id,
        )
        file_ids.append(file.id)

    files_list = compat_client.vector_stores.files.list(vector_store_id=vector_store.id)
    assert files_list
    assert files_list.object == "list"
    assert files_list.data
    assert len(files_list.data) == 3
    assert file_ids == [file.id for file in files_list.data]
    assert files_list.data[0].object == "vector_store.file"
    assert files_list.data[0].vector_store_id == vector_store.id
    assert files_list.data[0].status == "completed"
    assert files_list.data[0].chunking_strategy.type == "auto"
    assert files_list.data[0].created_at > 0
    assert not files_list.data[0].last_error

    updated_vector_store = compat_client.vector_stores.retrieve(vector_store_id=vector_store.id)
    assert updated_vector_store.file_counts.completed == 3
    assert updated_vector_store.file_counts.total == 3
    assert updated_vector_store.file_counts.cancelled == 0
    assert updated_vector_store.file_counts.failed == 0
    assert updated_vector_store.file_counts.in_progress == 0


def test_openai_vector_store_list_files_invalid_vector_store(compat_client_with_empty_stores, client_with_models):
    """Test OpenAI vector store list files with invalid vector store ID."""
    skip_if_provider_doesnt_support_openai_vector_stores(client_with_models)

    if isinstance(compat_client_with_empty_stores, LlamaStackClient):
        pytest.skip("Vector Store Files list is not yet supported with LlamaStackClient")

    compat_client = compat_client_with_empty_stores

    with pytest.raises((BadRequestError, OpenAIBadRequestError)):
        compat_client.vector_stores.files.list(vector_store_id="abc123")


def test_openai_vector_store_delete_file(compat_client_with_empty_stores, client_with_models):
    """Test OpenAI vector store delete file."""
    skip_if_provider_doesnt_support_openai_vector_stores(client_with_models)

    if isinstance(compat_client_with_empty_stores, LlamaStackClient):
        pytest.skip("Vector Store Files list is not yet supported with LlamaStackClient")

    compat_client = compat_client_with_empty_stores

    # Create a vector store
    vector_store = compat_client.vector_stores.create(name="test_store")

    # Create some files and attach them to the vector store
    file_ids = []
    for i in range(3):
        with BytesIO(f"This is a test file {i}".encode()) as file_buffer:
            file_buffer.name = f"openai_test_{i}.txt"
            file = compat_client.files.create(file=file_buffer, purpose="assistants")

        compat_client.vector_stores.files.create(
            vector_store_id=vector_store.id,
            file_id=file.id,
        )
        file_ids.append(file.id)

    files_list = compat_client.vector_stores.files.list(vector_store_id=vector_store.id)
    assert len(files_list.data) == 3

    # Delete the first file
    delete_response = compat_client.vector_stores.files.delete(vector_store_id=vector_store.id, file_id=file_ids[0])
    assert delete_response
    assert delete_response.id == file_ids[0]
    assert delete_response.deleted is True
    assert delete_response.object == "vector_store.file.deleted"

    updated_vector_store = compat_client.vector_stores.retrieve(vector_store_id=vector_store.id)
    assert updated_vector_store.file_counts.completed == 2
    assert updated_vector_store.file_counts.total == 2
    assert updated_vector_store.file_counts.cancelled == 0
    assert updated_vector_store.file_counts.failed == 0
    assert updated_vector_store.file_counts.in_progress == 0

    # Delete the second file
    delete_response = compat_client.vector_stores.files.delete(vector_store_id=vector_store.id, file_id=file_ids[1])
    assert delete_response
    assert delete_response.id == file_ids[1]

    updated_vector_store = compat_client.vector_stores.retrieve(vector_store_id=vector_store.id)
    assert updated_vector_store.file_counts.completed == 1
    assert updated_vector_store.file_counts.total == 1
    assert updated_vector_store.file_counts.cancelled == 0
    assert updated_vector_store.file_counts.failed == 0
    assert updated_vector_store.file_counts.in_progress == 0


def test_openai_vector_store_update_file(compat_client_with_empty_stores, client_with_models):
    """Test OpenAI vector store update file."""
    skip_if_provider_doesnt_support_openai_vector_stores(client_with_models)

    if isinstance(compat_client_with_empty_stores, LlamaStackClient):
        pytest.skip("Vector Store Files update is not yet supported with LlamaStackClient")

    compat_client = compat_client_with_empty_stores

    # Create a vector store
    vector_store = compat_client.vector_stores.create(name="test_store")

    # Create a file
    test_content = b"This is a test file"
    with BytesIO(test_content) as file_buffer:
        file_buffer.name = "openai_test.txt"
        file = compat_client.files.create(file=file_buffer, purpose="assistants")

    # Attach the file to the vector store
    file_attach_response = compat_client.vector_stores.files.create(
        vector_store_id=vector_store.id,
        file_id=file.id,
        attributes={"foo": "bar"},
    )

    assert file_attach_response.status == "completed"
    assert file_attach_response.attributes["foo"] == "bar"

    # Update the file's attributes
    updated_response = compat_client.vector_stores.files.update(
        vector_store_id=vector_store.id,
        file_id=file.id,
        attributes={"foo": "baz"},
    )

    assert updated_response.status == "completed"
    assert updated_response.attributes["foo"] == "baz"

    # Ensure we can retrieve the file and see the updated attributes
    retrieved_file = compat_client.vector_stores.files.retrieve(
        vector_store_id=vector_store.id,
        file_id=file.id,
    )
    assert retrieved_file.attributes["foo"] == "baz"