small cleanup and updating metadata inclusion in MemoryToolRuntimeImpl.query() to be more flexible in the future

Signed-off-by: Francisco Javier Arceo <farceo@redhat.com>
Francisco Javier Arceo 2025-06-25 14:54:05 -04:00
parent 0cc625560d
commit 7ed916dbb3
3 changed files with 17 additions and 23 deletions


@@ -149,9 +149,6 @@ class MemoryToolRuntimeImpl(ToolGroupsProtocolPrivate, ToolRuntime, RAGToolRunti
         ]
         for i, chunk in enumerate(chunks):
             metadata = chunk.metadata
-            # update chunk.metadata with the chunk.chunk_metadata if it exists
-            if chunk.chunk_metadata:
-                metadata = {**metadata, **chunk.chunk_metadata.dict()}
             tokens += metadata.get("token_count", 0)
             tokens += metadata.get("metadata_token_count", 0)
@@ -161,21 +158,24 @@ class MemoryToolRuntimeImpl(ToolGroupsProtocolPrivate, ToolRuntime, RAGToolRunti
                 )
                 break
-            metadata_fields_to_exclude_from_context = [
-                "created_timestamp",
-                "updated_timestamp",
-                "chunk_window",
-                "chunk_tokenizer",
-                "chunk_embedding_model",
-                "chunk_embedding_dimension",
+            # Add useful keys from chunk_metadata to metadata and remove some from metadata
+            chunk_metadata_keys_to_include_from_context = [
+                "chunk_id",
+                "document_id",
+                "source",
+            ]
+            metadata_keys_to_exclude_from_context = [
                 "token_count",
+                "content_token_count",
                 "metadata_token_count",
             ]
-            metadata_subset = {
-                k: v for k, v in metadata.items() if k not in metadata_fields_to_exclude_from_context and v
-            }
-            text_content = query_config.chunk_template.format(index=i + 1, chunk=chunk, metadata=metadata_subset)
+            metadata_for_context = {}
+            for k in chunk_metadata_keys_to_include_from_context:
+                metadata_for_context[k] = getattr(chunk.chunk_metadata, k)
+            for k in metadata:
+                if k not in metadata_keys_to_exclude_from_context:
+                    metadata_for_context[k] = metadata[k]
+            text_content = query_config.chunk_template.format(index=i + 1, chunk=chunk, metadata=metadata_for_context)
             picked.append(TextContentItem(text=text_content))
         picked.append(TextContentItem(text="END of knowledge_search tool results.\n"))


@@ -399,13 +399,7 @@ class SQLiteVecIndex(EmbeddingIndex):
         filtered_items = [(doc_id, score) for doc_id, score in top_k_items if score >= score_threshold]
         # Create a map of chunk_id to chunk for both responses
-        chunk_map = {}
-        for c in vector_response.chunks:
-            chunk_id = c.chunk_id
-            chunk_map[chunk_id] = c
-        for c in keyword_response.chunks:
-            chunk_id = c.chunk_id
-            chunk_map[chunk_id] = c
+        chunk_map = {c.chunk_id: c for c in vector_response.chunks + keyword_response.chunks}
         # Use the map to look up chunks by their IDs
         chunks = []
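
Note: the dict comprehension keeps the same de-duplication semantics as the two loops it replaces; when a chunk_id appears in both responses, the later (keyword) entry wins. A small illustration, using SimpleNamespace objects as hypothetical stand-ins for chunk objects:

from types import SimpleNamespace

vector_chunks = [SimpleNamespace(chunk_id="a"), SimpleNamespace(chunk_id="b")]
keyword_chunks = [SimpleNamespace(chunk_id="b"), SimpleNamespace(chunk_id="c")]

# Later entries overwrite earlier ones, so "b" resolves to the keyword-search object,
# exactly as the replaced pair of for-loops did.
chunk_map = {c.chunk_id: c for c in vector_chunks + keyword_chunks}
print(sorted(chunk_map))                    # ['a', 'b', 'c']
print(chunk_map["b"] is keyword_chunks[0])  # True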


@@ -56,7 +56,7 @@ class TestRagQuery:
         assert result is not None
         expected_metadata_string = (
-            "Metadata: {'key1': 'value1', 'document_id': 'doc1', 'chunk_id': 'chunk1', 'source': 'test_source'}"
+            "Metadata: {'chunk_id': 'chunk1', 'document_id': 'doc1', 'source': 'test_source', 'key1': 'value1'}"
         )
         assert expected_metadata_string in result.content[1].text
         assert result.content is not None
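
Note: the expected string changes because metadata_for_context now inserts the chunk_metadata keys first and the remaining chunk.metadata keys afterwards; Python dicts preserve insertion order, so the rendered ordering is deterministic. A quick sketch of that behavior (values mirror the test fixture):

# Dicts render in insertion order, which is why the expected string is stable.
metadata_for_context = {}
for k, v in [("chunk_id", "chunk1"), ("document_id", "doc1"), ("source", "test_source")]:
    metadata_for_context[k] = v
metadata_for_context["key1"] = "value1"  # non-excluded chunk.metadata keys come last
print(f"Metadata: {metadata_for_context}")
# Metadata: {'chunk_id': 'chunk1', 'document_id': 'doc1', 'source': 'test_source', 'key1': 'value1'}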