Merge branch 'main' into agents-openai-migration

commit 0327ef3daf by Matthew Farrellee, 2025-10-01 16:52:05 -04:00
169 changed files with 66293 additions and 47432 deletions


@@ -129,13 +129,16 @@ class StreamingResponseOrchestrator:
        messages = self.ctx.messages.copy()
        while True:
+           # Text is the default response format for chat completion so don't need to pass it
+           # (some providers don't support non-empty response_format when tools are present)
+           response_format = None if self.ctx.response_format.type == "text" else self.ctx.response_format
            completion_result = await self.inference_api.openai_chat_completion(
                model=self.ctx.model,
                messages=messages,
                tools=self.ctx.chat_tools,
                stream=True,
                temperature=self.ctx.temperature,
-               response_format=self.ctx.response_format,
+               response_format=response_format,
            )
            # Process streaming chunks and build complete response
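
To make the first change concrete, here is a minimal, self-contained sketch of the response_format guard. ResponseFormat and its fields below are hypothetical stand-ins, not the actual Llama Stack types; only the None-when-text rule mirrors the diff.

from dataclasses import dataclass
from typing import Optional

@dataclass
class ResponseFormat:
    # Hypothetical stand-in for ctx.response_format.
    type: str                          # e.g. "text" or "json_schema"
    json_schema: Optional[dict] = None

def effective_response_format(fmt: ResponseFormat) -> Optional[ResponseFormat]:
    # "text" is already the chat-completion default, and some providers reject
    # a non-empty response_format when tools are present, so omit it entirely.
    return None if fmt.type == "text" else fmt

assert effective_response_format(ResponseFormat(type="text")) is None
assert effective_response_format(ResponseFormat(type="json_schema", json_schema={"type": "object"})) is not None
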
@@ -352,8 +355,11 @@ class StreamingResponseOrchestrator:
            # Emit arguments.done events for completed tool calls (differentiate between MCP and function calls)
            for tool_call_index in sorted(chat_response_tool_calls.keys()):
                tool_call = chat_response_tool_calls[tool_call_index]
+               # Ensure that arguments, if sent back to the inference provider, are not None
+               tool_call.function.arguments = tool_call.function.arguments or "{}"
                tool_call_item_id = tool_call_item_ids[tool_call_index]
-               final_arguments = chat_response_tool_calls[tool_call_index].function.arguments or ""
+               final_arguments = tool_call.function.arguments
                tool_call_name = chat_response_tool_calls[tool_call_index].function.name
                # Check if this is an MCP tool call
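
The second change guards against tool calls whose streamed arguments were never populated, so a JSON string rather than None is sent back to the inference provider. A rough sketch of the same normalization, using a hypothetical ToolCallFunction stand-in rather than the real chunk types:

from dataclasses import dataclass
from typing import Optional

@dataclass
class ToolCallFunction:
    # Hypothetical stand-in for the accumulated tool-call function payload.
    name: str
    arguments: Optional[str] = None

def normalize_tool_call_arguments(fn: ToolCallFunction) -> str:
    # Providers expect a JSON-encoded string for arguments; never send None back.
    fn.arguments = fn.arguments or "{}"
    return fn.arguments

empty_call = ToolCallFunction(name="get_weather")
assert normalize_tool_call_arguments(empty_call) == "{}"
assert empty_call.arguments == "{}"
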