From 37a9d4fb05cf5243d2e847d3dfcb6cdccb495bdf Mon Sep 17 00:00:00 2001
From: Luis Tomas Bolivar
Date: Fri, 3 Oct 2025 15:11:26 +0200
Subject: [PATCH] Add protection to response_format

---
 .../inline/agents/meta_reference/responses/streaming.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/llama_stack/providers/inline/agents/meta_reference/responses/streaming.py b/llama_stack/providers/inline/agents/meta_reference/responses/streaming.py
index e80ffcdd1..540d36540 100644
--- a/llama_stack/providers/inline/agents/meta_reference/responses/streaming.py
+++ b/llama_stack/providers/inline/agents/meta_reference/responses/streaming.py
@@ -217,7 +217,9 @@ class StreamingResponseOrchestrator:
         while True:
             # Text is the default response format for chat completion so don't need to pass it
             # (some providers don't support non-empty response_format when tools are present)
-            response_format = None if self.ctx.response_format.type == "text" else self.ctx.response_format
+            response_format = (
+                None if getattr(self.ctx.response_format, "type", None) == "text" else self.ctx.response_format
+            )
             logger.debug(f"calling openai_chat_completion with tools: {self.ctx.chat_tools}")
             params = OpenAIChatCompletionRequestWithExtraBody(