Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-08-07 02:58:21 +00:00)
test: Make text-based chat completion tests run faster
Signed-off-by: Yuan Tang <terrytangyuan@gmail.com>
parent 7766e68e92
commit 6dea61609d
1 changed file with 2 additions and 2 deletions
@@ -156,8 +156,8 @@ def test_text_completion_structured_output(llama_stack_client, text_model_id, in
 @pytest.mark.parametrize(
     "question,expected",
     [
-        ("What are the names of planets in our solar system?", "Earth"),
-        ("What are the names of the planets that have rings around them?", "Saturn"),
+        ("Which planet do humans live?", "Earth"),
+        ("Which planet has rings around them with a name starting with letter S?", "Saturn"),
     ],
 )
 def test_text_chat_completion_non_streaming(llama_stack_client, text_model_id, question, expected):
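For context, a minimal sketch of what the parametrized test body might look like. It assumes the llama_stack_client pytest fixture wraps the llama-stack-client SDK (inference.chat_completion, completion_message.content) and that text_model_id is supplied by the test suite's conftest; these names and fields are illustrative rather than taken from this diff. The pointed single-answer questions added here keep the generated replies short, which is what speeds the tests up.

import pytest


@pytest.mark.parametrize(
    "question,expected",
    [
        # Short, single-answer questions so the model produces a brief reply.
        ("Which planet do humans live?", "Earth"),
        ("Which planet has rings around them with a name starting with letter S?", "Saturn"),
    ],
)
def test_text_chat_completion_non_streaming(llama_stack_client, text_model_id, question, expected):
    # One non-streaming chat completion per parametrized question.
    response = llama_stack_client.inference.chat_completion(
        model_id=text_model_id,
        messages=[{"role": "user", "content": question}],
        stream=False,
    )
    # Check that the expected keyword appears somewhere in the model's reply.
    message_content = response.completion_message.content.lower().strip()
    assert len(message_content) > 0
    assert expected.lower() in message_content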