Mirror of https://github.com/meta-llama/llama-stack.git, synced 2025-08-15 14:08:00 +00:00
no-defensive

parent 674478c851
commit a4b8eed576

1 changed file with 9 additions and 10 deletions
@@ -531,16 +531,15 @@ class OpenAIResponsesImpl:
 
             # Emit function_call_arguments.done events for completed tool calls
             for tool_call_index in sorted(chat_response_tool_calls.keys()):
-                if tool_call_index in tool_call_item_ids:
-                    tool_call_item_id = tool_call_item_ids[tool_call_index]
-                    final_arguments = chat_response_tool_calls[tool_call_index].function.arguments or ""
-                    sequence_number += 1
-                    yield OpenAIResponseObjectStreamResponseFunctionCallArgumentsDone(
-                        arguments=final_arguments,
-                        item_id=tool_call_item_id,
-                        output_index=len(output_messages),
-                        sequence_number=sequence_number,
-                    )
+                tool_call_item_id = tool_call_item_ids[tool_call_index]
+                final_arguments = chat_response_tool_calls[tool_call_index].function.arguments or ""
+                sequence_number += 1
+                yield OpenAIResponseObjectStreamResponseFunctionCallArgumentsDone(
+                    arguments=final_arguments,
+                    item_id=tool_call_item_id,
+                    output_index=len(output_messages),
+                    sequence_number=sequence_number,
+                )
 
             # Convert collected chunks to complete response
             if chat_response_tool_calls:
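The change drops the defensive `if tool_call_index in tool_call_item_ids:` check, so every accumulated tool call is expected to already have an item id when the done events are emitted. Below is a minimal, runnable sketch of the resulting loop under that assumption. The wrapper function `emit_done_events`, the `FunctionCallArgumentsDone` dataclass, and the `_ToolCall`/`_Function` stand-ins are hypothetical simplifications for illustration; only the field names and the loop body mirror the diff above.

from dataclasses import dataclass, field


@dataclass
class _Function:
    # Stand-in for the streamed function payload; arguments may be None until complete.
    arguments: str | None = None


@dataclass
class _ToolCall:
    # Stand-in for an accumulated tool call keyed by its stream index.
    function: _Function = field(default_factory=_Function)


@dataclass
class FunctionCallArgumentsDone:
    # Stand-in for OpenAIResponseObjectStreamResponseFunctionCallArgumentsDone.
    arguments: str
    item_id: str
    output_index: int
    sequence_number: int


def emit_done_events(chat_response_tool_calls, tool_call_item_ids, output_messages, sequence_number):
    # Without the defensive membership check, a tool call index missing from
    # tool_call_item_ids now raises KeyError instead of being silently skipped.
    for tool_call_index in sorted(chat_response_tool_calls.keys()):
        tool_call_item_id = tool_call_item_ids[tool_call_index]
        final_arguments = chat_response_tool_calls[tool_call_index].function.arguments or ""
        sequence_number += 1
        yield FunctionCallArgumentsDone(
            arguments=final_arguments,
            item_id=tool_call_item_id,
            output_index=len(output_messages),
            sequence_number=sequence_number,
        )


if __name__ == "__main__":
    # Tiny usage example: one completed tool call with an assigned item id.
    calls = {0: _ToolCall(_Function('{"city": "Paris"}'))}
    item_ids = {0: "fc_123"}
    for event in emit_done_events(calls, item_ids, output_messages=[], sequence_number=0):
        print(event)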