Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-12-17 23:29:47 +00:00)
feat(responses): add MCP argument streaming and content part events
- Add content part events (response.content_part.added/done) for granular text streaming
- Implement MCP-specific argument streaming (response.mcp_call.arguments.delta/done)
- Differentiate between MCP and function call streaming events
- Update unit and integration tests for new streaming events
- Ensure proper event ordering and OpenAI spec compliance

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
Parent: 3972ddfd43
Commit: 975e70cab3
4 changed files with 242 additions and 35 deletions
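For orientation, here is a minimal sketch of how a streaming client might dispatch on the event types this commit introduces. The content-part fields follow the models added in the diff below; the field names on the MCP argument events (item_id, delta, arguments) are assumptions, since those models are not part of this hunk.

# Sketch of a client-side dispatch loop; `stream` is assumed to yield parsed
# OpenAIResponseObjectStream events. Content-part fields follow the models in
# the diff below; the MCP argument event fields (item_id, delta, arguments)
# are assumed here, as those models are not shown in this hunk.
async def consume(stream):
    text_parts: dict[str, str] = {}  # content part id -> final text
    mcp_args: dict[str, str] = {}    # output item id -> accumulated JSON arguments

    async for event in stream:
        if event.type == "response.content_part.added":
            text_parts[event.part.id] = ""
        elif event.type == "response.content_part.done":
            if event.part.type == "text":
                text_parts[event.part.id] = event.part.text
        elif event.type == "response.mcp_call.arguments.delta":
            mcp_args[event.item_id] = mcp_args.get(event.item_id, "") + event.delta
        elif event.type == "response.mcp_call.arguments.done":
            mcp_args[event.item_id] = event.arguments
        elif event.type == "response.completed":
            return text_parts, mcp_args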
@@ -623,6 +623,58 @@ class OpenAIResponseObjectStreamResponseMcpCallCompleted(BaseModel):
    type: Literal["response.mcp_call.completed"] = "response.mcp_call.completed"


@json_schema_type
class OpenAIResponseContentPart(BaseModel):
    """Base class for response content parts."""

    id: str
    type: str


@json_schema_type
class OpenAIResponseContentPartText(OpenAIResponseContentPart):
    """Text content part for streaming responses."""

    text: str
    type: Literal["text"] = "text"


@json_schema_type
class OpenAIResponseObjectStreamResponseContentPartAdded(BaseModel):
    """Streaming event for when a new content part is added to a response item.

    :param response_id: Unique identifier of the response containing this content
    :param item_id: Unique identifier of the output item containing this content part
    :param part: The content part that was added
    :param sequence_number: Sequential number for ordering streaming events
    :param type: Event type identifier, always "response.content_part.added"
    """

    response_id: str
    item_id: str
    part: OpenAIResponseContentPart
    sequence_number: int
    type: Literal["response.content_part.added"] = "response.content_part.added"


@json_schema_type
class OpenAIResponseObjectStreamResponseContentPartDone(BaseModel):
    """Streaming event for when a content part is completed.

    :param response_id: Unique identifier of the response containing this content
    :param item_id: Unique identifier of the output item containing this content part
    :param part: The completed content part
    :param sequence_number: Sequential number for ordering streaming events
    :param type: Event type identifier, always "response.content_part.done"
    """

    response_id: str
    item_id: str
    part: OpenAIResponseContentPart
    sequence_number: int
    type: Literal["response.content_part.done"] = "response.content_part.done"


OpenAIResponseObjectStream = Annotated[
    OpenAIResponseObjectStreamResponseCreated
    | OpenAIResponseObjectStreamResponseOutputItemAdded
@@ -642,6 +694,8 @@ OpenAIResponseObjectStream = Annotated[
    | OpenAIResponseObjectStreamResponseMcpCallInProgress
    | OpenAIResponseObjectStreamResponseMcpCallFailed
    | OpenAIResponseObjectStreamResponseMcpCallCompleted
    | OpenAIResponseObjectStreamResponseContentPartAdded
    | OpenAIResponseObjectStreamResponseContentPartDone
    | OpenAIResponseObjectStreamResponseCompleted,
    Field(discriminator="type"),
]
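Since OpenAIResponseObjectStream is a Pydantic discriminated union keyed on the literal type field, a raw event dict can be validated straight into the matching model without manual dispatch. A small sketch using the two new models, with illustrative values, assuming Pydantic v2's TypeAdapter and the module where these models are defined:

from pydantic import TypeAdapter

# The "type" discriminator selects the concrete event model from the union.
adapter = TypeAdapter(OpenAIResponseObjectStream)
event = adapter.validate_python(
    {
        "type": "response.content_part.done",
        "response_id": "resp_1",
        "item_id": "item_1",
        "part": {"id": "cp_1", "type": "text"},
        "sequence_number": 7,
    }
)
assert isinstance(event, OpenAIResponseObjectStreamResponseContentPartDone)

Note that part is annotated as the base OpenAIResponseContentPart rather than a discriminated union of part types, so validation here yields a base-class part; only the top-level event type is narrowed.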