fix tests

This commit is contained in:
Swapna Lekkala 2025-10-10 09:38:33 -07:00
parent b5c951fa4b
commit 7b5dd54b7a
4 changed files with 13 additions and 8 deletions

View file

@@ -345,7 +345,7 @@ class OpenAIResponsesImpl:
return
# Structured outputs
-        response_format = convert_response_text_to_chat_response_format(text)
+        response_format = await convert_response_text_to_chat_response_format(text)
ctx = ChatCompletionContext(
model=model,

View file

@@ -13,8 +13,8 @@ from llama_stack.apis.agents.openai_responses import (
ApprovalFilter,
MCPListToolsTool,
OpenAIResponseContentPartOutputText,
-    OpenAIResponseError,
     OpenAIResponseContentPartRefusal,
+    OpenAIResponseError,
OpenAIResponseInputTool,
OpenAIResponseInputToolMCP,
OpenAIResponseMCPApprovalRequest,
@@ -226,6 +226,11 @@ class StreamingResponseOrchestrator:
completion_result_data = stream_event_or_result
else:
yield stream_event_or_result
+                # If violation detected, skip the rest of processing since we already sent refusal
+                if self.violation_detected:
+                    return
if not completion_result_data:
raise ValueError("Streaming chunk processor failed to return completion data")
last_completion_result = completion_result_data

View file

@@ -178,7 +178,7 @@ async def convert_response_input_to_chat_messages(
pass
else:
content = await convert_response_content_to_chat_content(input_item.content)
-            message_type = get_message_type_by_role(input_item.role)
+            message_type = await get_message_type_by_role(input_item.role)
if message_type is None:
raise ValueError(
f"Llama Stack OpenAI Responses does not yet support message role '{input_item.role}' in this context"