Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-12-03 18:00:36 +00:00)
reverting some formatting
commit 6bd0d644d1
parent a23ee35b24

1 changed file with 14 additions and 26 deletions
@@ -83,9 +83,7 @@ class ToolExecutor:
         yield event_result

         # Execute the actual tool call
-        error_exc, result = await self._execute_tool(
-            function.name, tool_kwargs, ctx, mcp_tool_to_server
-        )
+        error_exc, result = await self._execute_tool(function.name, tool_kwargs, ctx, mcp_tool_to_server)

         # Emit completion events for tool execution
         has_error = bool(
@@ -169,9 +167,7 @@ class ToolExecutor:
             if result_item.attributes:
                 metadata_text += f", attributes: {result_item.attributes}"

-            text_content = (
-                f"[{i + 1}] {metadata_text} (cite as <|{file_id}|>)\n{chunk_text}\n"
-            )
+            text_content = (f"[{i + 1}] {metadata_text} (cite as <|{file_id}|>)\n{chunk_text}\n")
             content_items.append(TextContentItem(text=text_content))
             unique_files.add(file_id)

@@ -372,9 +368,7 @@ class ToolExecutor:
             mcp_completed_event = OpenAIResponseObjectStreamResponseMcpCallCompleted(
                 sequence_number=sequence_number,
             )
-            yield ToolExecutionResult(
-                stream_event=mcp_completed_event, sequence_number=sequence_number
-            )
+            yield ToolExecutionResult(stream_event=mcp_completed_event, sequence_number=sequence_number)
         elif function_name == "web_search":
             sequence_number += 1
             web_completion_event = OpenAIResponseObjectStreamResponseWebSearchCallCompleted(
@@ -382,10 +376,7 @@ class ToolExecutor:
                 output_index=output_index,
                 sequence_number=sequence_number,
             )
-
-            yield ToolExecutionResult(
-                stream_event=web_completion_event, sequence_number=sequence_number
-            )
+            yield ToolExecutionResult(stream_event=web_completion_event, sequence_number=sequence_number)
         elif function_name == "knowledge_search":
             sequence_number += 1
             file_completion_event = OpenAIResponseObjectStreamResponseFileSearchCallCompleted(
@@ -393,10 +384,7 @@ class ToolExecutor:
                 output_index=output_index,
                 sequence_number=sequence_number,
            )
-
-            yield ToolExecutionResult(
-                stream_event=file_completion_event, sequence_number=sequence_number
-            )
+            yield ToolExecutionResult(stream_event=file_completion_event, sequence_number=sequence_number)

     async def _build_result_messages(
         self,
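The last three hunks collapse the same pattern back onto one line: wrap a completion event in a ToolExecutionResult and yield it together with the running sequence_number. A minimal sketch of how such a stream might be consumed, assuming the executor is driven through an async generator named execute_tool_call (that name and its parameters are assumptions for illustration, not part of this diff):

# Hypothetical consumer of the ToolExecutionResult stream yielded above.
# `execute_tool_call` and its signature are assumed for illustration; only
# the stream_event / sequence_number fields appear in this commit.
async def drain_tool_events(executor, function, tool_kwargs, ctx, mcp_tool_to_server):
    events = []
    async for tool_result in executor.execute_tool_call(  # assumed entry point
        function, tool_kwargs, ctx, mcp_tool_to_server
    ):
        if tool_result.stream_event is not None:
            # Each yield pairs a stream event with the current sequence number.
            events.append((tool_result.sequence_number, tool_result.stream_event))
    return events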