close connections

Raghotham Murthy 2025-10-07 08:29:06 -07:00
parent 22077e7f32
commit fa100c77fd
7 changed files with 13 additions and 5 deletions


@@ -314,7 +314,7 @@ class MetaReferenceAgentsImpl(Agents):
         return paginate_records(session_dicts, start_index, limit)
 
     async def shutdown(self) -> None:
-        pass
+        await self.persistence_store.close()
 
     # OpenAI responses
     async def get_openai_response(


@@ -129,6 +129,7 @@ class ReferenceBatchesImpl(Batches):
         # don't cancel tasks - just let them stop naturally on shutdown
         # cancelling would mark batches as "cancelled" in the database
         logger.info(f"Shutdown initiated with {len(self._processing_tasks)} active batch processing tasks")
+        await self.kvstore.close()
 
     # TODO (SECURITY): this currently works w/ configured api keys, not with x-llamastack-provider-data or with user policy restrictions
     async def create_batch(


@@ -64,7 +64,8 @@ class MetaReferenceEvalImpl(
             benchmark = Benchmark.model_validate_json(benchmark)
             self.benchmarks[benchmark.identifier] = benchmark
 
-    async def shutdown(self) -> None: ...
+    async def shutdown(self) -> None:
+        await self.kvstore.close()
 
     async def register_benchmark(self, task_def: Benchmark) -> None:
         # Store in kvstore


@@ -19,3 +19,7 @@ class KVStore(Protocol):
     async def values_in_range(self, start_key: str, end_key: str) -> list[str]: ...
 
     async def keys_in_range(self, start_key: str, end_key: str) -> list[str]: ...
+
+    async def close(self) -> None:
+        """Close any persistent connections. Optional method for cleanup."""
+        ...
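
For context, a persistent backend satisfies this hook by releasing whatever connection it keeps open. The sketch below is illustrative only and assumes an aiosqlite-backed store; the class name, table schema, and method bodies are not taken from this commit.

import aiosqlite


class SqliteBackedKVStore:
    """Hypothetical SQLite-backed store; not the repository's SqliteKVStoreImpl."""

    def __init__(self, db_path: str) -> None:
        self.db_path = db_path
        self._conn: aiosqlite.Connection | None = None

    async def initialize(self) -> None:
        # Keep one long-lived connection that the other methods reuse.
        self._conn = await aiosqlite.connect(self.db_path)
        await self._conn.execute("CREATE TABLE IF NOT EXISTS kv (key TEXT PRIMARY KEY, value TEXT)")
        await self._conn.commit()

    async def close(self) -> None:
        # Release the persistent connection; safe to call more than once.
        if self._conn is not None:
            await self._conn.close()
            self._conn = None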


@@ -43,6 +43,10 @@ class InmemoryKVStoreImpl(KVStore):
     async def delete(self, key: str) -> None:
         del self._store[key]
 
+    async def close(self) -> None:
+        """No-op for in-memory store."""
+        pass
+
 
 async def kvstore_impl(config: KVStoreConfig) -> KVStore:
     if config.type == KVStoreType.redis.value:
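
On the caller side, the pattern the other changes in this commit follow (agents, batches, eval, and the test fixture) is: create the store via kvstore_impl, use it, and close it on shutdown. A hypothetical usage sketch follows; the key and value are invented, and it assumes the factory returns an already-initialized store.

async def run_with_store(config: KVStoreConfig) -> None:
    store = await kvstore_impl(config)
    try:
        await store.set("greeting", "hello")
        print(await store.get("greeting"))
    finally:
        # Mirrors the shutdown() changes above: persistent backends release
        # their connection here; the in-memory store's close() is a no-op.
        await store.close()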


@@ -18,6 +18,7 @@ async def sqlite_kvstore(tmp_path):
     kvstore = SqliteKVStoreImpl(kvstore_config)
     await kvstore.initialize()
     yield kvstore
+    await kvstore.close()
 
 
 @pytest.fixture(scope="function")


@@ -46,12 +46,9 @@ async def test_initialize_index(vector_index):
 
 
 async def test_add_chunks_query_vector(vector_index, sample_chunks, sample_embeddings):
-    vector_index.delete()
-    vector_index.initialize()
     await vector_index.add_chunks(sample_chunks, sample_embeddings)
     resp = await vector_index.query_vector(sample_embeddings[0], k=1, score_threshold=-1)
     assert resp.chunks[0].content == sample_chunks[0].content
-    vector_index.delete()
 
 
 async def test_chunk_id_conflict(vector_index, sample_chunks, embedding_dimension):