Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-12-15 04:22:38 +00:00)
feat(responses)!: improve responses + conversations implementations
This PR updates the Conversation item related types and improves two critical parts of the implementation:
- It creates a streaming output item for the final assistant message output by the model. Until now we only added content parts and included that message in the final response.
- It rewrites the conversation update code completely to account for items other than messages (tool calls, tool outputs, etc.).
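For context, a minimal sketch of the first change: emitting a dedicated output-item event for the final assistant message rather than only surfacing it in the final response object. The event dict shapes and the "response.*" type strings below are assumptions modeled on the Responses streaming format, not the exact types introduced by this PR.

    # Hedged sketch only: event shapes and type strings are assumptions
    # modeled on the Responses streaming format, not this PR's types.
    from collections.abc import AsyncIterator


    async def stream_assistant_message(content_parts: list[str]) -> AsyncIterator[dict]:
        # Stream each content part, as the implementation already did.
        for index, part in enumerate(content_parts):
            yield {"type": "response.content_part.done", "index": index, "text": part}
        # New behavior: also emit an output-item event carrying the assembled
        # assistant message, instead of only including it in the final response.
        yield {
            "type": "response.output_item.done",
            "item": {
                "type": "message",
                "role": "assistant",
                "content": "".join(content_parts),
            },
        }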
Parent: d875e427bf
Commit: d47f2c0ba8
11 changed files with 511 additions and 441 deletions
@@ -255,6 +255,12 @@ class ConversationServiceImpl(Conversations):

    async def list(self, conversation_id: str, after=NOT_GIVEN, include=NOT_GIVEN, limit=NOT_GIVEN, order=NOT_GIVEN):
        """List items in the conversation."""
        if not conversation_id:
            raise ValueError(f"Expected a non-empty value for `conversation_id` but received {conversation_id!r}")

        # check if conversation exists
        await self.get_conversation(conversation_id)

        result = await self.sql_store.fetch_all(table="conversation_items", where={"conversation_id": conversation_id})
        records = result.data
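The hunk above ends right after the raw fetch, so this excerpt does not show how the `after`, `limit`, and `order` parameters are applied to `records`. As a hedged sketch only, here is one way cursor pagination and ordering could work; the record fields (`id`, `created_at`) and the cursor semantics are assumptions for illustration, not necessarily what the PR implements.

    # Illustrative only: assumes each record is a dict with "id" and
    # "created_at" fields; the real schema and cursor semantics may differ.
    def apply_list_params(records: list[dict], after: str | None = None,
                          limit: int | None = 20, order: str = "desc") -> list[dict]:
        items = sorted(records, key=lambda r: r["created_at"], reverse=(order == "desc"))
        if after is not None:
            ids = [r["id"] for r in items]
            if after in ids:
                # Cursor pagination: keep only items strictly after the given id.
                items = items[ids.index(after) + 1:]
        return items if limit is None else items[:limit]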