chore(misc): make tests and starter faster

commit 2b4e88a3de
parent ea46f74092
19 changed files with 2860 additions and 1660 deletions
@@ -4,17 +4,11 @@
 # This source code is licensed under the terms described in the LICENSE file in
 # the root directory of this source tree.
 import pytest
-from openai import BadRequestError, OpenAI
+from openai import BadRequestError

 from llama_stack.core.library_client import LlamaStackAsLibraryClient


-@pytest.fixture
-def openai_client(client_with_models):
-    base_url = f"{client_with_models.base_url}/v1/openai/v1"
-    return OpenAI(base_url=base_url, api_key="bar")
-
-
 @pytest.mark.parametrize(
     "stream",
     [
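The tests below now take a shared compat_client fixture instead of the local openai_client fixture deleted here. That shared fixture is defined outside this diff; a minimal sketch of what it could look like in a conftest.py, reusing the deleted code (the fixture body and location are assumptions):

# Hypothetical conftest.py sketch; the real compat_client fixture lives
# outside this diff and may differ.
import pytest
from openai import OpenAI


@pytest.fixture
def compat_client(client_with_models):
    # Point the OpenAI SDK at the server's OpenAI-compatible prefix,
    # exactly as the deleted openai_client fixture did.
    base_url = f"{client_with_models.base_url}/v1/openai/v1"
    return OpenAI(base_url=base_url, api_key="bar")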
@@ -41,15 +35,11 @@ def openai_client(client_with_models):
         ],
     ],
 )
-def test_responses_store(openai_client, client_with_models, text_model_id, stream, tools):
-    if isinstance(client_with_models, LlamaStackAsLibraryClient):
-        pytest.skip("OpenAI responses are not supported when testing with library client yet.")
-
-    client = openai_client
+def test_responses_store(compat_client, text_model_id, stream, tools):
     message = "What's the weather in Tokyo?" + (
         " YOU MUST USE THE get_weather function to get the weather." if tools else ""
     )
-    response = client.responses.create(
+    response = compat_client.responses.create(
         model=text_model_id,
         input=[
             {
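The closing brackets at the top of this hunk belong to the tools parametrization, which is defined above the hunk and not shown in the diff. A hedged sketch of what those parametrized values plausibly look like, inferred only from the get_weather name in the prompt:

# Assumed shape of the parametrized `tools` cases; the real values are
# defined above this hunk and are not part of the diff.
TOOLS_CASES = [
    [],  # no tools: the model should answer in plain text
    [
        {
            "type": "function",
            "name": "get_weather",
            "description": "Get the weather for a given city.",
            "parameters": {
                "type": "object",
                "properties": {"city": {"type": "string"}},
            },
        }
    ],
]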
@@ -78,14 +68,8 @@ def test_responses_store(openai_client, client_with_models, text_model_id, stream, tools):
     if output_type == "message":
         content = response.output[0].content[0].text

-    # list responses - use the underlying HTTP client for endpoints not in SDK
-    list_response = client._client.get("/responses")
-    assert list_response.status_code == 200
-    data = list_response.json()["data"]
-    assert response_id in [r["id"] for r in data]
-
     # test retrieve response
-    retrieved_response = client.responses.retrieve(response_id)
+    retrieved_response = compat_client.responses.retrieve(response_id)
     assert retrieved_response.id == response_id
     assert retrieved_response.model == text_model_id
     assert retrieved_response.output[0].type == output_type, retrieved_response
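The listing check deleted above went through the SDK's private HTTP client because the OpenAI SDK exposes no responses.list(). A standalone sketch of the same check using httpx directly; the bearer token and base URL are assumptions carried over from the deleted openai_client fixture:

# Standalone version of the deleted listing assertion; auth and URL are
# assumptions based on the removed openai_client fixture.
import httpx


def response_is_listed(base_url: str, response_id: str) -> bool:
    resp = httpx.get(f"{base_url}/responses", headers={"Authorization": "Bearer bar"})
    assert resp.status_code == 200
    return response_id in [r["id"] for r in resp.json()["data"]]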
@@ -93,23 +77,19 @@
         assert retrieved_response.output[0].content[0].text == content

     # Delete the response
-    delete_response = client.responses.delete(response_id)
+    delete_response = compat_client.responses.delete(response_id)
     assert delete_response is None

     with pytest.raises(BadRequestError):
-        client.responses.retrieve(response_id)
+        compat_client.responses.retrieve(response_id)


-def test_list_response_input_items(openai_client, client_with_models, text_model_id):
+def test_list_response_input_items(compat_client, text_model_id):
     """Test the new list_openai_response_input_items endpoint."""
-    if isinstance(client_with_models, LlamaStackAsLibraryClient):
-        pytest.skip("OpenAI responses are not supported when testing with library client yet.")
-
-    client = openai_client
     message = "What is the capital of France?"

     # Create a response first
-    response = client.responses.create(
+    response = compat_client.responses.create(
         model=text_model_id,
         input=[
             {
@@ -123,7 +103,7 @@ def test_list_response_input_items(openai_client, client_with_models, text_model_id):
     response_id = response.id

     # Test the new list input items endpoint
-    input_items_response = client.responses.input_items.list(response_id=response_id)
+    input_items_response = compat_client.responses.input_items.list(response_id=response_id)

     # Verify the structure follows OpenAI API spec
     assert input_items_response.object == "list"
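For context, a standalone sketch of the create/retrieve/delete lifecycle these tests exercise, using the plain OpenAI SDK against a running stack; the port, model id, and api_key below are placeholders, and the base URL is the OpenAI-compatible prefix from the deleted fixture:

# Minimal lifecycle sketch, assuming a server on localhost:8321; model id
# and api_key are placeholders.
from openai import BadRequestError, OpenAI

client = OpenAI(base_url="http://localhost:8321/v1/openai/v1", api_key="bar")

response = client.responses.create(
    model="meta-llama/Llama-3.3-70B-Instruct",
    input=[{"role": "user", "content": "What is the capital of France?"}],
)
retrieved = client.responses.retrieve(response.id)
assert retrieved.id == response.id

client.responses.delete(response.id)
try:
    client.responses.retrieve(response.id)
except BadRequestError:
    print("deleted as expected")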