OpenAI Responses API: Stub in basic web_search tool

Authored by Ben Browning on 2025-04-17 20:25:36 -04:00; committed by Ashwin Bharambe
parent 52a69f0bf9
commit 35b2e2646f
8 changed files with 232 additions and 15 deletions

View file

@@ -14,6 +14,7 @@ from pathlib import Path
import pytest
import yaml
from llama_stack_client import LlamaStackClient
from openai import OpenAI
from llama_stack import LlamaStackAsLibraryClient
from llama_stack.apis.datatypes import Api
@@ -207,3 +208,9 @@ def llama_stack_client(request, provider_data, text_model_id):
raise RuntimeError("Initialization failed")
return client
@pytest.fixture(scope="session")
def openai_client(client_with_models):
base_url = f"{client_with_models.base_url}/v1/openai/v1"
return OpenAI(base_url=base_url, api_key="fake")
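For orientation, a minimal sketch of what this session-scoped fixture amounts to when the stack is served locally; the port and model id below are assumptions, not part of this commit:

from openai import OpenAI

# Hypothetical example: point the plain OpenAI client at a locally running
# Llama Stack server's OpenAI-compatible endpoint.
client = OpenAI(base_url="http://localhost:8321/v1/openai/v1", api_key="fake")
response = client.responses.create(
    model="meta-llama/Llama-3.1-8B-Instruct",  # assumed model id
    input="Say hello",
)
print(response.output_text)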

View file

@@ -6,17 +6,10 @@
import pytest
from openai import OpenAI
from ..test_cases.test_case import TestCase
@pytest.fixture
def openai_client(client_with_models):
base_url = f"{client_with_models.base_url}/v1/openai/v1"
return OpenAI(base_url=base_url, api_key="bar")
@pytest.mark.parametrize(
"test_case",
[
@@ -24,7 +17,7 @@ def openai_client(client_with_models):
"openai:responses:non_streaming_02",
],
)
def test_openai_responses_non_streaming(openai_client, client_with_models, text_model_id, test_case):
def test_basic_non_streaming(openai_client, client_with_models, text_model_id, test_case):
tc = TestCase(test_case)
question = tc["question"]
expected = tc["expected"]
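The hunk cuts off before the rest of this test body; a hedged sketch of the non-streaming call it presumably makes, mirroring the web_search test added later in this commit:

# Sketch only: the exact body of test_basic_non_streaming is not shown in this hunk.
response = openai_client.responses.create(
    model=text_model_id,
    input=question,
    stream=False,
)
assert expected.lower() in response.output_text.lower().strip()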
@@ -55,7 +48,7 @@ def test_openai_responses_non_streaming(openai_client, client_with_models, text_
"openai:responses:streaming_02",
],
)
def test_openai_responses_streaming(openai_client, client_with_models, text_model_id, test_case):
def test_basic_streaming(openai_client, client_with_models, text_model_id, test_case):
tc = TestCase(test_case)
question = tc["question"]
expected = tc["expected"]
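Likewise, the streaming body is not shown; a hedged sketch of how a streaming Responses call is typically consumed with the OpenAI client (the event names follow the OpenAI Responses streaming shape and are not taken from this diff):

stream = openai_client.responses.create(
    model=text_model_id,
    input=question,
    stream=True,
)
final_text = ""
for event in stream:
    # The terminal event carries the fully assembled response object.
    if event.type == "response.completed":
        final_text = event.response.output_text
assert expected.lower() in final_text.lower().strip()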

View file

@@ -0,0 +1,38 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.
import pytest
from ..test_cases.test_case import TestCase
@pytest.mark.parametrize(
"test_case",
[
"openai:responses:tools_web_search_01",
],
)
def test_web_search_non_streaming(openai_client, client_with_models, text_model_id, test_case):
tc = TestCase(test_case)
input = tc["input"]
expected = tc["expected"]
tools = tc["tools"]
response = openai_client.responses.create(
model=text_model_id,
input=input,
tools=tools,
stream=False,
)
assert len(response.output) > 1
assert response.output[0].type == "web_search_call"
assert response.output[0].status == "completed"
assert response.output[1].type == "message"
assert response.output[1].status == "completed"
assert response.output[1].role == "assistant"
assert len(response.output[1].content) > 0
assert expected.lower() in response.output_text.lower().strip()
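For illustration only, a response satisfying the assertions above would carry an output list shaped roughly like this (the values are hypothetical, not captured from a real run):

illustrative_output = [
    {"type": "web_search_call", "status": "completed"},
    {
        "type": "message",
        "status": "completed",
        "role": "assistant",
        "content": [
            {"type": "output_text", "text": "The Llama 4 Maverick model has 128 experts."}
        ],
    },
]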

View file

@@ -22,5 +22,16 @@
"question": "What is the name of the US captial?",
"expected": "Washington"
}
},
"tools_web_search_01": {
"data": {
"input": "How many experts does the Llama 4 Maverick model have?",
"tools": [
{
"type": "web_search"
}
],
"expected": "128"
}
}
}
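
A short sketch of how the new registry entry is looked up (the key format mirrors the tests above; TestCase internals are assumed, not shown in this commit):

from ..test_cases.test_case import TestCase

tc = TestCase("openai:responses:tools_web_search_01")
assert tc["tools"] == [{"type": "web_search"}]
assert tc["expected"] == "128"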