From 8ad0e31359358ad7cb3d22f86783643e22d774ce Mon Sep 17 00:00:00 2001
From: Sixian Yi
Date: Tue, 21 Jan 2025 17:37:14 -0800
Subject: [PATCH] add test report

---
 llama_stack/templates/fireworks/report.md | 53 +++++++++++++++++++++++
 tests/client-sdk/conftest.py              | 10 +++---
 tests/client-sdk/report.py                |  2 +-
 3 files changed, 59 insertions(+), 6 deletions(-)
 create mode 100644 llama_stack/templates/fireworks/report.md

diff --git a/llama_stack/templates/fireworks/report.md b/llama_stack/templates/fireworks/report.md
new file mode 100644
index 000000000..890132b33
--- /dev/null
+++ b/llama_stack/templates/fireworks/report.md
@@ -0,0 +1,53 @@
+# Report for fireworks distribution
+
+## Supported Models:
+| Model Descriptor | fireworks |
+|:---|:---|
+| Llama-3-8B-Instruct | ❌ |
+| Llama-3-70B-Instruct | ❌ |
+| Llama3.1-8B-Instruct | ✅ |
+| Llama3.1-70B-Instruct | ✅ |
+| Llama3.1-405B-Instruct | ✅ |
+| Llama3.1-405B-Instruct | ✅ |
+| Llama3.1-405B-Instruct | ✅ |
+| Llama3.2-1B-Instruct | ✅ |
+| Llama3.2-3B-Instruct | ✅ |
+| Llama3.2-1B-Instruct | ✅ |
+| Llama3.2-1B-Instruct | ✅ |
+| Llama3.2-3B-Instruct | ✅ |
+| Llama3.2-3B-Instruct | ✅ |
+| Llama3.2-11B-Vision-Instruct | ✅ |
+| Llama3.2-90B-Vision-Instruct | ✅ |
+| Llama3.3-70B-Instruct | ✅ |
+| Llama-Guard-3-11B-Vision | ✅ |
+| Llama-Guard-3-1B | ❌ |
+| Llama-Guard-3-1B | ❌ |
+| Llama-Guard-3-8B | ✅ |
+| Llama-Guard-3-8B | ✅ |
+| Llama-Guard-2-8B | ❌ |
+
+## Inference:
+| Model | API | Capability | Test | Status |
+|:----- |:-----|:-----|:-----|:-----|
+| Text | /chat_completion | streaming | test_text_chat_completion_streaming | Passed |
+| Vision | /chat_completion | streaming | test_image_chat_completion_streaming | Passed |
+| Text | /chat_completion | non_streaming | test_text_chat_completion_non_streaming | Passed |
+| Vision | /chat_completion | non_streaming | test_image_chat_completion_non_streaming | Passed |
+| Text | /chat_completion | tool_calling | test_text_chat_completion_with_tool_calling_and_streaming | Passed |
+| Text | /chat_completion | tool_calling | test_text_chat_completion_with_tool_calling_and_non_streaming | Passed |
+| Text | /completion | streaming | test_text_completion_streaming | Passed |
+| Text | /completion | non_streaming | test_text_completion_non_streaming | Passed |
+| Text | /completion | structured_output | test_text_completion_structured_output | Passed |
+
+## Memory:
+| API | Capability | Test | Status |
+|:-----|:-----|:-----|:-----|
+| insert and query | inline | test_memory_bank_insert_inline_and_query | Error |
+| insert and query | url | test_memory_bank_insert_from_url_and_query | Failed |
+
+## Agent:
+| API | Capability | Test | Status |
+|:-----|:-----|:-----|:-----|
+| create_agent_turn | rag | test_rag_agent | Failed |
+| create_agent_turn | custom_tool | test_custom_tool | Passed |
+| create_agent_turn | code_execution | test_code_execution | Failed |
\ No newline at end of file
diff --git a/tests/client-sdk/conftest.py b/tests/client-sdk/conftest.py
index 1a55cb6dc..c19546887 100644
--- a/tests/client-sdk/conftest.py
+++ b/tests/client-sdk/conftest.py
@@ -16,15 +16,15 @@ from report import Report
 
 def pytest_configure(config):
     config.option.tbstyle = "short"
     config.option.disable_warnings = True
-
-    if config.getoption("--output"):
+    if config.getoption("--report"):
         config.pluginmanager.register(Report())
 
 
 def pytest_addoption(parser):
     parser.addoption(
-        "--output",
-        action="store_false",
-        help="Knob to determine if we should generate report, e.g. --output=True",
+        "--report",
+        default=False,
+        action="store_true",
+        help="Knob to determine if we should generate a report, e.g. --report",
     )
diff --git a/tests/client-sdk/report.py b/tests/client-sdk/report.py
index 08d04e1f3..614bc8296 100644
--- a/tests/client-sdk/report.py
+++ b/tests/client-sdk/report.py
@@ -108,7 +108,7 @@ class Report:
 
         report.append("\n## Supported Models: ")
         header = f"| Model Descriptor | {self.image_name} |"
-        dividor = "|:---|"
+        dividor = "|:---|:---|"
         report.append(header)
         report.append(dividor)
 