add back 2/n

Xi Yan 2025-02-20 17:40:39 -08:00
parent cd36a77e20
commit ee3c174bb3


@@ -31,6 +31,7 @@ from llama_stack.apis.agents import (
     AgentTurnResponseStepProgressPayload,
     AgentTurnResponseStepStartPayload,
     AgentTurnResponseStreamChunk,
+    AgentTurnResponseTurnAwaitingInputPayload,
     AgentTurnResponseTurnCompletePayload,
     AgentTurnResponseTurnStartPayload,
     Attachment,
@@ -227,13 +228,23 @@ class ChatAgent(ShieldRunnerMixin):
             )
             await self.storage.add_turn_to_session(request.session_id, turn)

-            chunk = AgentTurnResponseStreamChunk(
-                event=AgentTurnResponseEvent(
-                    payload=AgentTurnResponseTurnCompletePayload(
-                        turn=turn,
-                    )
-                )
-            )
+            if output_message.tool_calls:
+                chunk = AgentTurnResponseStreamChunk(
+                    event=AgentTurnResponseEvent(
+                        payload=AgentTurnResponseTurnAwaitingInputPayload(
+                            turn=turn,
+                        )
+                    )
+                )
+            else:
+                chunk = AgentTurnResponseStreamChunk(
+                    event=AgentTurnResponseEvent(
+                        payload=AgentTurnResponseTurnCompletePayload(
+                            turn=turn,
+                        )
+                    )
+                )
             yield chunk

     async def continue_turn(self, request: AgentTurnContinueRequest) -> AsyncGenerator:
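
What this diff does, in effect: when the assistant's final output_message carries tool_calls, the turn now ends in an awaiting-input state (to be resumed via continue_turn) rather than being marked complete. Below is a rough consumer-side sketch of how a client might branch on the two terminal payloads; the consume_turn helper and the shape of the stream argument are hypothetical, only the payload classes and the chunk.event.payload / payload.turn fields come from the diff itself.

from llama_stack.apis.agents import (
    AgentTurnResponseTurnAwaitingInputPayload,
    AgentTurnResponseTurnCompletePayload,
)

async def consume_turn(stream):
    # stream: an async iterator of AgentTurnResponseStreamChunk, as yielded above.
    # (Hypothetical helper, not part of this commit.)
    async for chunk in stream:
        payload = chunk.event.payload
        if isinstance(payload, AgentTurnResponseTurnAwaitingInputPayload):
            # The turn paused because output_message.tool_calls is non-empty;
            # the caller is expected to supply tool responses (continue_turn)
            # before the turn can complete.
            return ("awaiting_input", payload.turn)
        if isinstance(payload, AgentTurnResponseTurnCompletePayload):
            # No pending tool calls; the turn finished normally.
            return ("complete", payload.turn)

Emitting a distinct awaiting-input payload means a client can tell a paused turn from a finished one directly from the event stream, without inspecting output_message.tool_calls itself.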