mirror of
https://github.com/meta-llama/llama-stack.git
synced 2025-07-27 14:38:49 +00:00
test(vector-io): enable chunk deletion test for vector stores
Remove the xfail marker from test_openai_vector_store_delete_file_removes_from_vector_store now that chunk deletion functionality has been implemented.

Signed-off-by: Derek Higgins <derekh@redhat.com>
This commit is contained in:
parent
239c2a9eef
commit
05d3fffbdf
2 changed files with 0 additions and 8 deletions
|
@@ -779,12 +779,6 @@ class OpenAIVectorStoreMixin(ABC):
         file = await self.openai_retrieve_vector_store_file(vector_store_id, file_id)
         await self._delete_openai_vector_store_file_from_storage(vector_store_id, file_id)

-        # TODO: We need to actually delete the embeddings from the underlying vector store...
-        # Also uncomment the corresponding integration test marked as xfail
-        #
-        # test_openai_vector_store_delete_file_removes_from_vector_store in
-        # tests/integration/vector_io/test_openai_vector_stores.py
-
         # Update in-memory cache
         store_info["file_ids"].remove(file_id)
         store_info["file_counts"][file.status] -= 1
|
Loading…
Add table
Add a link
Reference in a new issue