lint fix and rename file search

Signed-off-by: Francisco Javier Arceo <farceo@redhat.com>
Francisco Javier Arceo 2025-10-24 20:24:03 -04:00
parent 2d9163529a
commit bb34e942d1
7 changed files with 26 additions and 25 deletions

@@ -41,7 +41,7 @@ class ToolRuntimeRouter(ToolRuntime):
             query_config: FileSearchConfig | None = None,
         ) -> FileSearchResult:
             logger.debug(f"ToolRuntimeRouter.FileSearchToolImpl.query: {vector_store_ids}")
-            provider = await self.routing_table.get_provider_impl("knowledge_search")
+            provider = await self.routing_table.get_provider_impl("file_search")
             return await provider.query(content, vector_store_ids, query_config)
 
         async def insert(
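Why the string literal matters: `get_provider_impl` resolves the provider by the tool's registered name, so the router, the tool executor, and the provider's `ToolDef` (all renamed in the hunks below) must agree on "file_search". A minimal sketch of the lookup-then-delegate pattern this router uses (hypothetical types, not the actual llama-stack routing table):

import asyncio


class Provider:
    async def query(self, content: str, vector_store_ids: list[str]) -> str:
        return f"searched {vector_store_ids} for {content!r}"


class RoutingTable:
    def __init__(self) -> None:
        self._impls: dict[str, Provider] = {}

    def register(self, tool_name: str, impl: Provider) -> None:
        self._impls[tool_name] = impl

    async def get_provider_impl(self, tool_name: str) -> Provider:
        # A caller still passing the stale "knowledge_search" key would fail here.
        return self._impls[tool_name]


async def main() -> None:
    table = RoutingTable()
    table.register("file_search", Provider())
    provider = await table.get_provider_impl("file_search")
    print(await provider.query("llama", ["vs_123"]))


asyncio.run(main())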

@@ -18,7 +18,7 @@ logger = get_logger(name=__name__, category="core::routing_tables")
 
 
 def parse_toolgroup_from_toolgroup_name_pair(toolgroup_name_with_maybe_tool_name: str) -> str | None:
-    # handle the funny case like "builtin::file_search/knowledge_search"
+    # handle the funny case like "builtin::file_search/file_search"
     parts = toolgroup_name_with_maybe_tool_name.split("/")
     if len(parts) == 2:
         return parts[0]
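A worked example of the "funny case" the comment describes: a caller may pass either a bare toolgroup name or a "toolgroup/tool" pair, and only the pair form is split. The sketch below re-implements the visible logic; the `return None` fallback is an assumption inferred from the `str | None` return type.

def parse_toolgroup_from_toolgroup_name_pair(name: str) -> str | None:
    # "builtin::file_search/file_search" -> "builtin::file_search"
    parts = name.split("/")
    if len(parts) == 2:
        return parts[0]
    return None  # assumed fallback for a bare toolgroup name


assert parse_toolgroup_from_toolgroup_name_pair("builtin::file_search/file_search") == "builtin::file_search"
assert parse_toolgroup_from_toolgroup_name_pair("builtin::file_search") is None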

@@ -84,7 +84,7 @@ from .persistence import AgentPersistence
 from .safety import SafetyException, ShieldRunnerMixin
 
 TOOLS_ATTACHMENT_KEY_REGEX = re.compile(r"__tools_attachment__=(\{.*?\})")
-MEMORY_QUERY_TOOL = "knowledge_search"
+MEMORY_QUERY_TOOL = "file_search"
 WEB_SEARCH_TOOL = "web_search"
 RAG_TOOL_GROUP = "builtin::file_search"
 
@@ -927,7 +927,7 @@ class ChatAgent(ShieldRunnerMixin):
         """Parse a toolgroup name into its components.
 
         Args:
-            toolgroup_name: The toolgroup name to parse (e.g. "builtin::file_search/knowledge_search")
+            toolgroup_name: The toolgroup name to parse (e.g. "builtin::file_search/file_search")
 
         Returns:
             A tuple of (tool_type, tool_group, tool_name)

@@ -675,7 +675,7 @@ class StreamingResponseOrchestrator:
                 # Emit output_item.added event for the new function call
                 self.sequence_number += 1
                 is_mcp_tool = tool_call.function.name and tool_call.function.name in self.mcp_tool_to_server
-                if not is_mcp_tool and tool_call.function.name not in ["web_search", "knowledge_search"]:
+                if not is_mcp_tool and tool_call.function.name not in ["web_search", "file_search"]:
                     # for MCP tools (and even other non-function tools) we emit an output message item later
                     function_call_item = OpenAIResponseOutputMessageFunctionToolCall(
                         arguments="",  # Will be filled incrementally via delta events
@@ -900,7 +900,7 @@
                     id=matching_item_id,
                     status="in_progress",
                 )
-            elif tool_call.function.name == "knowledge_search":
+            elif tool_call.function.name == "file_search":
                 item = OpenAIResponseOutputMessageFileSearchToolCall(
                     id=matching_item_id,
                     status="in_progress",
@@ -1019,7 +1019,7 @@
                     raise ValueError(f"Tool {tool_name} not found")
                 self.ctx.chat_tools.append(make_openai_tool(tool_name, tool))
             elif input_tool.type == "file_search":
-                tool_name = "knowledge_search"
+                tool_name = "file_search"
                 tool = await self.tool_executor.tool_groups_api.get_tool(tool_name)
                 if not tool:
                     raise ValueError(f"Tool {tool_name} not found")
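This branch maps the OpenAI-style `file_search` input tool to the internal tool name and then resolves it with `get_tool`; the lookup only succeeds because the provider's `ToolDef` is renamed to "file_search" in a later hunk. A minimal sketch of that name contract, with a hypothetical registry standing in for the real tool-groups API:

import asyncio

# Hypothetical registry: ToolDef.name is the key get_tool() resolves.
tool_defs = {
    "file_search": {"description": "Search for information in a database."},
}


async def get_tool(name: str) -> dict | None:
    return tool_defs.get(name)  # None takes the "Tool ... not found" error path


assert asyncio.run(get_tool("file_search")) is not None
assert asyncio.run(get_tool("knowledge_search")) is None  # stale name now fails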

@@ -104,12 +104,12 @@ class ToolExecutor:
             citation_files=result.metadata.get("citation_files") if result and result.metadata else None,
         )
 
-    async def _execute_knowledge_search_via_vector_store(
+    async def _execute_file_search_via_vector_store(
         self,
         query: str,
         response_file_search_tool: OpenAIResponseInputToolFileSearch,
     ) -> ToolInvocationResult:
-        """Execute knowledge search using vector_stores.search API with filters support."""
+        """Execute file search using vector_stores.search API with filters support."""
         search_results = []
 
         # Create search tasks for all vector stores
@@ -139,7 +139,7 @@
         content_items = []
         content_items.append(
             TextContentItem(
-                text=f"knowledge_search tool found {len(search_results)} chunks:\nBEGIN of knowledge_search tool results.\n"
+                text=f"file_search tool found {len(search_results)} chunks:\nBEGIN of file_search tool results.\n"
             )
         )
@@ -158,7 +158,7 @@
             content_items.append(TextContentItem(text=text_content))
             unique_files.add(file_id)
 
-        content_items.append(TextContentItem(text="END of knowledge_search tool results.\n"))
+        content_items.append(TextContentItem(text="END of file_search tool results.\n"))
 
         citation_instruction = ""
         if unique_files:
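To make the framing concrete: the executor wraps every retrieved chunk in a BEGIN/END envelope before handing the text back to the model. A rough sketch of the assembled string, using made-up chunk bodies (the real entries are rendered from `chunk_template`):

search_results = ["First retrieved chunk.", "Second retrieved chunk."]

parts = [f"file_search tool found {len(search_results)} chunks:\nBEGIN of file_search tool results.\n"]
for i, chunk in enumerate(search_results):
    parts.append(f"Result {i + 1}\nContent: {chunk}\n")  # stand-in for chunk_template output
parts.append("END of file_search tool results.\n")

print("".join(parts))
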
@@ -224,7 +224,7 @@
                 output_index=output_index,
                 sequence_number=sequence_number,
             )
-        elif function_name == "knowledge_search":
+        elif function_name == "file_search":
             sequence_number += 1
             progress_event = OpenAIResponseObjectStreamResponseFileSearchCallInProgress(
                 item_id=item_id,
@@ -246,7 +246,7 @@
             yield ToolExecutionResult(stream_event=searching_event, sequence_number=sequence_number)
 
         # For file search, emit searching event
-        if function_name == "knowledge_search":
+        if function_name == "file_search":
             sequence_number += 1
             searching_event = OpenAIResponseObjectStreamResponseFileSearchCallSearching(
                 item_id=item_id,
@@ -283,17 +283,17 @@
                 tool_name=function_name,
                 kwargs=tool_kwargs,
             )
-        elif function_name == "knowledge_search":
+        elif function_name == "file_search":
             response_file_search_tool = next(
                 (t for t in ctx.response_tools if isinstance(t, OpenAIResponseInputToolFileSearch)),
                 None,
             )
             if response_file_search_tool:
-                # Use vector_stores.search API instead of knowledge_search tool
+                # Use vector_stores.search API instead of file_search tool
                 # to support filters and ranking_options
                 query = tool_kwargs.get("query", "")
-                async with tracing.span("knowledge_search", {}):
-                    result = await self._execute_knowledge_search_via_vector_store(
+                async with tracing.span("file_search", {}):
+                    result = await self._execute_file_search_via_vector_store(
                         query=query,
                         response_file_search_tool=response_file_search_tool,
                     )
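The `next((...), None)` call above is the standard first-match-or-`None` idiom for pulling the file-search tool config out of the request's tool list. A self-contained illustration with stand-in dataclasses (the class names below are hypothetical; only the idiom matches the code above):

from dataclasses import dataclass, field


@dataclass
class FileSearchToolConfig:  # stand-in for OpenAIResponseInputToolFileSearch
    vector_store_ids: list[str] = field(default_factory=list)


@dataclass
class WebSearchToolConfig:  # any other tool type in the request
    pass


response_tools = [WebSearchToolConfig(), FileSearchToolConfig(vector_store_ids=["vs_1"])]

# First tool passing the isinstance check, or None if the request had no file_search tool.
match = next((t for t in response_tools if isinstance(t, FileSearchToolConfig)), None)
assert match is not None and match.vector_store_ids == ["vs_1"]
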
@@ -341,7 +341,7 @@
                 output_index=output_index,
                 sequence_number=sequence_number,
             )
-        elif function_name == "knowledge_search":
+        elif function_name == "file_search":
             sequence_number += 1
             completion_event = OpenAIResponseObjectStreamResponseFileSearchCallCompleted(
                 item_id=item_id,
@@ -395,7 +395,7 @@
             )
             if has_error:
                 message.status = "failed"
-        elif function.name == "knowledge_search":
+        elif function.name == "file_search":
             message = OpenAIResponseOutputMessageFileSearchToolCall(
                 id=item_id,
                 queries=[tool_kwargs.get("query", "")],

@@ -226,9 +226,7 @@ class FileSearchToolRuntimeImpl(ToolGroupsProtocolPrivate, ToolRuntime, FileSear
         tokens = 0
         picked: list[InterleavedContentItem] = [
-            TextContentItem(
-                text=f"knowledge_search tool found {len(chunks)} chunks:\nBEGIN of knowledge_search tool results.\n"
-            )
+            TextContentItem(text=f"file_search tool found {len(chunks)} chunks:\nBEGIN of file_search tool results.\n")
         ]
 
         for i, chunk in enumerate(chunks):
             metadata = chunk.metadata
@@ -262,7 +260,7 @@
             text_content = query_config.chunk_template.format(index=i + 1, chunk=chunk, metadata=metadata_for_context)
             picked.append(TextContentItem(text=text_content))
 
-        picked.append(TextContentItem(text="END of knowledge_search tool results.\n"))
+        picked.append(TextContentItem(text="END of file_search tool results.\n"))
         picked.append(
             TextContentItem(
                 text=f'The above results were retrieved to help answer the user\'s query: "{interleaved_content_as_str(content)}". Use them as supporting information only in answering this query.\n',
@@ -292,7 +290,7 @@
                 description="Insert documents into memory",
             ),
             ToolDef(
-                name="knowledge_search",
+                name="file_search",
                 description="Search for information in a database.",
                 input_schema={
                     "type": "object",

@@ -1167,7 +1167,10 @@ export default function ChatPlaygroundPage() {
     // find RAG toolgroups that have vector_db_ids configured
     const ragToolgroups = selectedAgentConfig.toolgroups.filter(toolgroup => {
-      if (typeof toolgroup === "object" && toolgroup.name?.includes("file_search")) {
+      if (
+        typeof toolgroup === "object" &&
+        toolgroup.name?.includes("file_search")
+      ) {
         return toolgroup.args && "vector_db_ids" in toolgroup.args;
       }
       return false;
     });