From 277f8690ef2f7dab522e3f6da40719806a72a51d Mon Sep 17 00:00:00 2001
From: Ben Browning
Date: Sat, 31 May 2025 21:24:04 -0400
Subject: [PATCH] fix: Responses streaming tools don't concatenate None and str
 (#2326)

# What does this PR do?

This adds a check to ensure we don't attempt to concatenate `None + str` or
`str + None` when building up our arguments for streaming tool calls in the
Responses API.

## Test Plan

All existing tests pass with this change.

Unit tests:

```
python -m pytest -s -v \
  tests/unit/providers/agents/meta_reference/test_openai_responses.py
```

Integration tests:

```
llama stack run llama_stack/templates/together/run.yaml

LLAMA_STACK_CONFIG=http://localhost:8321 \
python -m pytest -s -v \
  tests/integration/agents/test_openai_responses.py \
  --text-model meta-llama/Llama-4-Scout-17B-16E-Instruct
```

Verification tests:

```
llama stack run llama_stack/templates/together/run.yaml

pytest -s -v 'tests/verifications/openai_api/test_responses.py' \
  --base-url=http://localhost:8321/v1/openai/v1 \
  --model meta-llama/Llama-4-Scout-17B-16E-Instruct
```

Additionally, the manual example using Codex CLI from #2325 now succeeds
instead of throwing a 500 error.

Closes #2325

Signed-off-by: Ben Browning
---
 .../inline/agents/meta_reference/openai_responses.py | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/llama_stack/providers/inline/agents/meta_reference/openai_responses.py b/llama_stack/providers/inline/agents/meta_reference/openai_responses.py
index 1fcb1c461..19d7ea56f 100644
--- a/llama_stack/providers/inline/agents/meta_reference/openai_responses.py
+++ b/llama_stack/providers/inline/agents/meta_reference/openai_responses.py
@@ -492,7 +492,12 @@ class OpenAIResponsesImpl:
                 for tool_call in chunk_choice.delta.tool_calls:
                     response_tool_call = chat_response_tool_calls.get(tool_call.index, None)
                     if response_tool_call:
-                        response_tool_call.function.arguments += tool_call.function.arguments
+                        # Don't attempt to concatenate arguments if we don't have any new arguments
+                        if tool_call.function.arguments:
+                            # Guard against an initial None argument before we concatenate
+                            response_tool_call.function.arguments = (
+                                response_tool_call.function.arguments or ""
+                            ) + tool_call.function.arguments
                     else:
                         tool_call_dict: dict[str, Any] = tool_call.model_dump()
                         tool_call_dict.pop("type", None)
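
Note (not part of the patch): the following is a minimal, self-contained sketch of the guard the diff introduces, showing why the old `+=` failed and how the `or ""` fallback behaves. `SimpleNamespace` is used here only as a stand-in for the accumulated tool call and the streamed delta objects; the real objects come from the chat-completion streaming types in the codebase.

```python
from types import SimpleNamespace

# Stand-ins: on the first streamed chunk some providers send arguments=None
# rather than an empty string, so the accumulated value can start as None.
accumulated = SimpleNamespace(function=SimpleNamespace(arguments=None))
delta = SimpleNamespace(function=SimpleNamespace(arguments='{"query": "weather"}'))

# Old behavior (would raise):
#   accumulated.function.arguments += delta.function.arguments
#   TypeError: unsupported operand type(s) for +=: 'NoneType' and 'str'

# Patched behavior: skip empty/None deltas, and coerce an initial None to ""
# before concatenating.
if delta.function.arguments:
    accumulated.function.arguments = (
        accumulated.function.arguments or ""
    ) + delta.function.arguments

print(accumulated.function.arguments)  # {"query": "weather"}
```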