rename response to responses in verifications, update provider

Ashwin Bharambe 2025-04-28 10:46:09 -07:00
parent 78da66016f
commit ae012bb857
3 changed files with 13 additions and 13 deletions

View file

@@ -1,8 +1,8 @@
 version: '2'
 image_name: openai-api-verification
 apis:
+- agents
 - inference
-- openai_responses
 - telemetry
 - tool_runtime
 - vector_io
@@ -46,14 +46,14 @@ providers:
       service_name: "${env.OTEL_SERVICE_NAME:\u200B}"
       sinks: ${env.TELEMETRY_SINKS:console,sqlite}
       sqlite_db_path: ${env.SQLITE_DB_PATH:~/.llama/distributions/openai/trace_store.db}
-  openai_responses:
-  - provider_id: openai-responses
-    provider_type: inline::openai-responses
+  agents:
+  - provider_id: meta-reference
+    provider_type: inline::meta-reference
     config:
-      kvstore:
+      persistence_store:
         type: sqlite
         namespace: null
-        db_path: ${env.SQLITE_STORE_DIR:~/.llama/distributions/openai}/openai_responses.db
+        db_path: ${env.SQLITE_STORE_DIR:~/.llama/distributions/openai}/agents_store.db
   tool_runtime:
   - provider_id: brave-search
     provider_type: remote::brave-search
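
A note on the ${env.VAR:default} references in this config: llama-stack expands them against the process environment when the run config is loaded, falling back to the text after the colon when the variable is unset (the \u200B above appears to be a literal zero-width-space placeholder standing in for an empty default). As a rough illustration only, with a helper name and regex of my own rather than the stack's actual resolver:

import os
import re

# Matches ${env.VAR:default}; the default part may be empty.
_ENV_REF = re.compile(r"\$\{env\.([A-Za-z_][A-Za-z0-9_]*):([^}]*)\}")

def resolve_env_refs(value: str) -> str:
    # Use the environment variable when set, otherwise the inline default.
    return _ENV_REF.sub(lambda m: os.environ.get(m.group(1), m.group(2)), value)

# With SQLITE_STORE_DIR unset:
# resolve_env_refs("${env.SQLITE_STORE_DIR:~/.llama/distributions/openai}/agents_store.db")
# -> "~/.llama/distributions/openai/agents_store.db"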

View file

@@ -14,12 +14,12 @@ from tests.verifications.openai_api.fixtures.fixtures import (
 )
 from tests.verifications.openai_api.fixtures.load import load_test_cases
 
-response_test_cases = load_test_cases("response")
+responses_test_cases = load_test_cases("responses")
 
 
 @pytest.mark.parametrize(
     "case",
-    response_test_cases["test_response_basic"]["test_params"]["case"],
+    responses_test_cases["test_response_basic"]["test_params"]["case"],
     ids=case_id_generator,
 )
 def test_response_non_streaming_basic(request, openai_client, model, provider, verification_config, case):
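
Every test in this file is parametrized the same way, so the remaining hunks below are mechanical repeats of this one: the case dicts come straight from the renamed fixture bundle, and ids=case_id_generator gives each case a readable pytest id so -k selection stays stable across the rename. case_id_generator is imported from the fixtures package; a minimal sketch of the pattern it implements (the body here is my assumption, not the repo's code):

def case_id_generator(case):
    # Use the case's "case_id" field as the pytest id when present;
    # returning None lets pytest fall back to its auto-generated id.
    case_id = case.get("case_id")
    return str(case_id) if case_id is not None else None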
@@ -48,7 +48,7 @@ def test_response_non_streaming_basic(request, openai_client, model, provider, verification_config, case):
 
 @pytest.mark.parametrize(
     "case",
-    response_test_cases["test_response_basic"]["test_params"]["case"],
+    responses_test_cases["test_response_basic"]["test_params"]["case"],
     ids=case_id_generator,
 )
 def test_response_streaming_basic(request, openai_client, model, provider, verification_config, case):

@@ -77,7 +77,7 @@ def test_response_streaming_basic(request, openai_client, model, provider, verification_config, case):
 
 @pytest.mark.parametrize(
     "case",
-    response_test_cases["test_response_multi_turn"]["test_params"]["case"],
+    responses_test_cases["test_response_multi_turn"]["test_params"]["case"],
     ids=case_id_generator,
 )
 def test_response_non_streaming_multi_turn(request, openai_client, model, provider, verification_config, case):

@@ -100,7 +100,7 @@ def test_response_non_streaming_multi_turn(request, openai_client, model, provider, verification_config, case):
 
 @pytest.mark.parametrize(
     "case",
-    response_test_cases["test_response_web_search"]["test_params"]["case"],
+    responses_test_cases["test_response_web_search"]["test_params"]["case"],
     ids=case_id_generator,
 )
 def test_response_non_streaming_web_search(request, openai_client, model, provider, verification_config, case):

@@ -126,7 +126,7 @@ def test_response_non_streaming_web_search(request, openai_client, model, provider, verification_config, case):
 
 @pytest.mark.parametrize(
     "case",
-    response_test_cases["test_response_image"]["test_params"]["case"],
+    responses_test_cases["test_response_image"]["test_params"]["case"],
     ids=case_id_generator,
 )
 def test_response_non_streaming_image(request, openai_client, model, provider, verification_config, case):

@@ -145,7 +145,7 @@ def test_response_non_streaming_image(request, openai_client, model, provider, verification_config, case):
 
 @pytest.mark.parametrize(
     "case",
-    response_test_cases["test_response_multi_turn_image"]["test_params"]["case"],
+    responses_test_cases["test_response_multi_turn_image"]["test_params"]["case"],
     ids=case_id_generator,
 )
 def test_response_non_streaming_multi_turn_image(request, openai_client, model, provider, verification_config, case):
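
The load_test_cases("responses") change only holds together because the fixture file itself was renamed to match, presumably the third changed file in this commit (its rename shows no added or deleted lines, consistent with the 13/13 totals being fully accounted for by the two files above). Assuming the loader resolves its argument to a YAML file under the fixtures' test_cases directory, a hypothetical reading rather than the repo's actual implementation, it would look roughly like:

from pathlib import Path

import yaml

def load_test_cases(name: str) -> dict:
    # Resolve e.g. "responses" to .../fixtures/test_cases/responses.yaml and parse it.
    yaml_path = Path(__file__).parent / "test_cases" / f"{name}.yaml"
    with yaml_path.open() as f:
        return yaml.safe_load(f)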