fix: Restore previous responses to input list, not messages

This adjusts the restoration of previous responses so they are prepended
to the list of Responses API inputs instead of to our converted list of
Chat Completion messages. This matches the expected behavior of the
Responses API; the initial implementation misinterpreted that nuance.

Signed-off-by: Ben Browning <bbrownin@redhat.com>
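
For context, a minimal sketch of the corrected flow, using hypothetical helper
and parameter names (fetch_previous_items is assumed, not the actual
implementation): items from the previous response are prepended to the
Responses API input list itself, before any conversion to Chat Completion
messages.

    async def _prepend_previous_response(input, previous_response_id, fetch_previous_items):
        # No chaining requested; pass the input through untouched.
        if previous_response_id is None:
            return input

        # Hypothetical async lookup of the stored previous response's items.
        previous_items = await fetch_previous_items(previous_response_id)

        # Normalize a bare string input into a one-item input list.
        new_input = [input] if isinstance(input, str) else list(input)

        # Prepend while everything is still in Responses API form; conversion
        # to Chat Completion messages happens afterwards, on the combined list.
        return list(previous_items) + new_input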
Ben Browning 2025-05-02 11:08:09 -04:00
parent 467fb19345
commit 263eb6fd37
7 changed files with 400 additions and 303 deletions

@@ -29,7 +29,7 @@ from llama_stack.apis.tools import ToolDef
 from llama_stack.schema_utils import json_schema_type, register_schema, webmethod
 from .openai_responses import (
-    OpenAIResponseInputMessage,
+    OpenAIResponseInput,
     OpenAIResponseInputTool,
     OpenAIResponseObject,
     OpenAIResponseObjectStream,
@@ -588,7 +588,7 @@ class Agents(Protocol):
     @webmethod(route="/openai/v1/responses", method="POST")
     async def create_openai_response(
         self,
-        input: str | list[OpenAIResponseInputMessage],
+        input: str | list[OpenAIResponseInput],
         model: str,
         previous_response_id: str | None = None,
         store: bool | None = True,
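
With the signature above, chaining a follow-up turn onto a stored response only
requires passing previous_response_id; a hypothetical usage sketch (the agents
handle, model name, and prompts are assumptions for illustration):

    first = await agents.create_openai_response(
        input="What is the capital of France?",
        model="meta-llama/Llama-3.2-3B-Instruct",
    )

    # Only the new input is supplied here; the server restores the previous
    # response's items onto the Responses API input list before inference.
    follow_up = await agents.create_openai_response(
        input="And what is its population?",
        model="meta-llama/Llama-3.2-3B-Instruct",
        previous_response_id=first.id,
    )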