chore(test): migrate unit tests from unittest to pytest for system prompt (#2789)

This PR migrates the system prompt template unit tests from unittest to pytest.
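The change follows the usual unittest-to-pytest conversion: drop the `PromptTemplateTests(unittest.TestCase)` wrapper, turn each test method into a module-level function without `self`, and delete the now-unneeded `self.maxDiff = None` lines; the bare `assert` statements were already pytest-friendly and stay as they are. A minimal sketch of the pattern, using `test_system_default` as the example (illustrative only; it assumes `SystemDefaultGenerator` is exported by `llama_stack.models.llama.llama3.prompt_templates`, as in the diff below):

```python
from datetime import datetime

from llama_stack.models.llama.llama3.prompt_templates import SystemDefaultGenerator

# Before: a method on a unittest.TestCase subclass.
#
# class PromptTemplateTests(unittest.TestCase):
#     def test_system_default(self):
#         ...same body as below, with an unused `self`...


# After: a plain module-level function that pytest collects by its test_ prefix.
def test_system_default():
    generator = SystemDefaultGenerator()
    today = datetime.now().strftime("%d %B %Y")
    expected_text = f"Cutting Knowledge Date: December 2023\nToday Date: {today}"
    assert expected_text.strip("\n") == generator.gen(generator.data_examples()[0]).render()
```

With the class removed, `python -m pytest` discovers these functions directly by their `test_` prefix, and there is no TestCase boilerplate left to maintain.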

Part of https://github.com/meta-llama/llama-stack/issues/2680

cc @leseb

Signed-off-by: Mustafa Elbehery <melbeher@redhat.com>
Mustafa Elbehery authored 2025-07-18 11:54:02 +02:00, committed by GitHub
parent 3cdf748a8e
commit 75480b01b8

@@ -12,7 +12,6 @@
 # the top-level of this source tree.
 import textwrap
-import unittest
 from datetime import datetime
 from llama_stack.models.llama.llama3.prompt_templates import (
@@ -24,24 +23,24 @@ from llama_stack.models.llama.llama3.prompt_templates import (
 )
-class PromptTemplateTests(unittest.TestCase):
-    def check_generator_output(self, generator):
+def check_generator_output(generator):
     for example in generator.data_examples():
         pt = generator.gen(example)
         text = pt.render()
         # print(text) # debugging
         if not example:
             continue
         for tool in example:
             assert tool.tool_name in text
-    def test_system_default(self):
+def test_system_default():
     generator = SystemDefaultGenerator()
     today = datetime.now().strftime("%d %B %Y")
     expected_text = f"Cutting Knowledge Date: December 2023\nToday Date: {today}"
     assert expected_text.strip("\n") == generator.gen(generator.data_examples()[0]).render()
-    def test_system_builtin_only(self):
+def test_system_builtin_only():
     generator = BuiltinToolGenerator()
     expected_text = textwrap.dedent(
         """
@@ -51,21 +50,23 @@ class PromptTemplateTests(unittest.TestCase):
     )
     assert expected_text.strip("\n") == generator.gen(generator.data_examples()[0]).render()
-    def test_system_custom_only(self):
-        self.maxDiff = None
+def test_system_custom_only():
     generator = JsonCustomToolGenerator()
-        self.check_generator_output(generator)
+    check_generator_output(generator)
-    def test_system_custom_function_tag(self):
-        self.maxDiff = None
+def test_system_custom_function_tag():
     generator = FunctionTagCustomToolGenerator()
-        self.check_generator_output(generator)
+    check_generator_output(generator)
-    def test_llama_3_2_system_zero_shot(self):
+def test_llama_3_2_system_zero_shot():
     generator = PythonListCustomToolGenerator()
-        self.check_generator_output(generator)
+    check_generator_output(generator)
-    def test_llama_3_2_provided_system_prompt(self):
+def test_llama_3_2_provided_system_prompt():
     generator = PythonListCustomToolGenerator()
     user_system_prompt = textwrap.dedent(
         """