Mirror of https://github.com/meta-llama/llama-stack.git
Synced 2025-06-28 19:04:19 +00:00
multi turn

# What does this PR do?

## Test Plan

This commit is contained in:
parent 31a3ae60f4
commit dd9e0ec23b

1 changed file with 65 additions and 0 deletions
@@ -3,6 +3,8 @@
 #
 # This source code is licensed under the terms described in the LICENSE file in
 # the root directory of this source tree.
+import json
+
 import pytest
 
 from openai import OpenAI
@@ -93,6 +95,69 @@ def test_responses_store(openai_client, client_with_models, text_model_id, stream
     assert retrieved_response.output[0].content[0].text == content
 
 
+def test_responses_store_with_tools(openai_client, client_with_models, text_model_id):
+    """Tests that previous response id is used to continue the conversation with tools."""
+    if isinstance(client_with_models, LlamaStackAsLibraryClient):
+        pytest.skip("OpenAI responses are not supported when testing with library client yet.")
+
+    client = openai_client
+    message = "What's the weather in Tokyo?" + " YOU MUST USE THE get_weather function to get the weather."
+    response = client.responses.create(
+        model=text_model_id,
+        input=[
+            {
+                "role": "user",
+                "content": message,
+            }
+        ],
+        stream=False,
+        tools=[
+            {
+                "type": "function",
+                "name": "get_weather",
+                "description": "Get the weather in a given city",
+                "parameters": {
+                    "type": "object",
+                    "properties": {
+                        "city": {"type": "string", "description": "The city to get the weather for"},
+                    },
+                },
+            }
+        ],
+    )
+    assert response.output[0].type == "function_call"
+    assert response.output[0].name == "get_weather"
+    assert json.loads(response.output[0].arguments) == {"city": "Tokyo"}
+
+    response = client.responses.create(
+        model=text_model_id,
+        input=[
+            {
+                "type": "function_call_output",
+                "call_id": response.output[0].call_id,
+                "output": "sunny and warm",
+            }
+        ],
+        stream=False,
+        tools=[
+            {
+                "type": "function",
+                "name": "get_weather",
+                "description": "Get the weather in a given city",
+                "parameters": {
+                    "type": "object",
+                    "properties": {
+                        "city": {"type": "string", "description": "The city to get the weather for"},
+                    },
+                },
+            }
+        ],
+        previous_response_id=response.id,
+    )
+    assert response.output[0].type == "message"
+    assert "sunny and warm" in response.output[0].content[0].text
+
+
 def test_list_response_input_items(openai_client, client_with_models, text_model_id):
     """Test the new list_openai_response_input_items endpoint."""
     if isinstance(client_with_models, LlamaStackAsLibraryClient):
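For context, here is a minimal end-to-end sketch of the multi-turn tool flow the new test exercises, written against a generic OpenAI-compatible Responses endpoint. The base URL, API key, and model id below are placeholders (assumptions, not part of this PR), and the weather lookup is stubbed:

```python
import json

from openai import OpenAI

# Assumed endpoint and credentials; point at any OpenAI-compatible
# Responses API, e.g. a running Llama Stack distribution.
client = OpenAI(base_url="http://localhost:8321/v1/openai/v1", api_key="none")
MODEL = "meta-llama/Llama-3.3-70B-Instruct"  # placeholder model id

WEATHER_TOOL = {
    "type": "function",
    "name": "get_weather",
    "description": "Get the weather in a given city",
    "parameters": {
        "type": "object",
        "properties": {
            "city": {"type": "string", "description": "The city to get the weather for"},
        },
    },
}

# Turn 1: the model should respond with a function_call output item.
first = client.responses.create(
    model=MODEL,
    input=[{"role": "user", "content": "What's the weather in Tokyo?"}],
    tools=[WEATHER_TOOL],
)
call = first.output[0]
assert call.type == "function_call"
args = json.loads(call.arguments)  # e.g. {"city": "Tokyo"}

# Execute the tool locally (stubbed here instead of a real lookup).
tool_result = "sunny and warm"

# Turn 2: previous_response_id links the turns, so only the tool result
# needs to be sent; the server restores the earlier conversation state.
second = client.responses.create(
    model=MODEL,
    input=[{"type": "function_call_output", "call_id": call.call_id, "output": tool_result}],
    tools=[WEATHER_TOOL],
    previous_response_id=first.id,
)
print(second.output[0].content[0].text)  # should mention "sunny and warm"
```

Locally, the new test can be selected by name, e.g. `pytest -k test_responses_store_with_tools` (the exact invocation depends on the repo's test setup). The key mechanism being tested is `previous_response_id`: rather than resending the whole conversation, the second call references the stored first response, and the server reconstructs the history, including the pending `function_call`, before appending the `function_call_output`.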