feat: Updating files/content response to return additional fields

Signed-off-by: Francisco Javier Arceo <farceo@redhat.com>
Francisco Javier Arceo 2025-08-06 16:55:14 -04:00
parent e12524af85
commit a19c16428f
143 changed files with 6907 additions and 15104 deletions


@@ -9,12 +9,6 @@ from openai import BadRequestError, OpenAI
 from llama_stack.core.library_client import LlamaStackAsLibraryClient
 
 
-@pytest.fixture
-def openai_client(client_with_models):
-    base_url = f"{client_with_models.base_url}/v1/openai/v1"
-    return OpenAI(base_url=base_url, api_key="bar")
-
-
 @pytest.mark.parametrize(
     "stream",
     [
@@ -41,15 +35,14 @@ def openai_client(client_with_models):
         ],
     ],
 )
-def test_responses_store(openai_client, client_with_models, text_model_id, stream, tools):
-    if isinstance(client_with_models, LlamaStackAsLibraryClient):
-        pytest.skip("OpenAI responses are not supported when testing with library client yet.")
+def test_responses_store(compat_client, text_model_id, stream, tools):
+    if not isinstance(compat_client, OpenAI):
+        pytest.skip("OpenAI client is required until responses.delete() exists in llama-stack-client")
 
-    client = openai_client
     message = "What's the weather in Tokyo?" + (
         " YOU MUST USE THE get_weather function to get the weather." if tools else ""
     )
-    response = client.responses.create(
+    response = compat_client.responses.create(
         model=text_model_id,
         input=[
             {
@@ -78,14 +71,8 @@ def test_responses_store(openai_client, client_with_models, text_model_id, stream, tools):
     if output_type == "message":
         content = response.output[0].content[0].text
 
-    # list responses - use the underlying HTTP client for endpoints not in SDK
-    list_response = client._client.get("/responses")
-    assert list_response.status_code == 200
-    data = list_response.json()["data"]
-    assert response_id in [r["id"] for r in data]
-
     # test retrieve response
-    retrieved_response = client.responses.retrieve(response_id)
+    retrieved_response = compat_client.responses.retrieve(response_id)
     assert retrieved_response.id == response_id
     assert retrieved_response.model == text_model_id
     assert retrieved_response.output[0].type == output_type, retrieved_response
@@ -93,23 +80,19 @@ def test_responses_store(openai_client, client_with_models, text_model_id, stream, tools):
     assert retrieved_response.output[0].content[0].text == content
 
     # Delete the response
-    delete_response = client.responses.delete(response_id)
+    delete_response = compat_client.responses.delete(response_id)
     assert delete_response is None
 
     with pytest.raises(BadRequestError):
-        client.responses.retrieve(response_id)
+        compat_client.responses.retrieve(response_id)
 
 
-def test_list_response_input_items(openai_client, client_with_models, text_model_id):
+def test_list_response_input_items(compat_client, text_model_id):
     """Test the new list_openai_response_input_items endpoint."""
-    if isinstance(client_with_models, LlamaStackAsLibraryClient):
-        pytest.skip("OpenAI responses are not supported when testing with library client yet.")
-
-    client = openai_client
     message = "What is the capital of France?"
 
     # Create a response first
-    response = client.responses.create(
+    response = compat_client.responses.create(
         model=text_model_id,
         input=[
             {
@@ -123,7 +106,7 @@ def test_list_response_input_items(openai_client, client_with_models, text_model_id):
     response_id = response.id
 
     # Test the new list input items endpoint
-    input_items_response = client.responses.input_items.list(response_id=response_id)
+    input_items_response = compat_client.responses.input_items.list(response_id=response_id)
 
     # Verify the structure follows OpenAI API spec
     assert input_items_response.object == "list"
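The `compat_client` fixture these tests now request is defined outside this hunk, presumably in the suite's conftest.py. Below is a minimal sketch of what it might look like, reconstructed from the removed `openai_client` fixture above and the new `isinstance(compat_client, OpenAI)` skip checks; the file placement and exact body are assumptions, not part of this commit.

# Hypothetical conftest.py fixture -- a sketch, not this commit's actual code.
import pytest
from openai import OpenAI

from llama_stack.core.library_client import LlamaStackAsLibraryClient


@pytest.fixture
def compat_client(client_with_models):
    # A library client exposes no HTTP server to point the OpenAI SDK at, so
    # hand back the native client; tests that need OpenAI-only methods (for
    # example responses.delete) then skip themselves via
    # `isinstance(compat_client, OpenAI)`.
    if isinstance(client_with_models, LlamaStackAsLibraryClient):
        return client_with_models
    # Same wiring as the removed openai_client fixture: target the server's
    # OpenAI-compatible route with a dummy API key.
    base_url = f"{client_with_models.base_url}/v1/openai/v1"
    return OpenAI(base_url=base_url, api_key="bar")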