add test report
commit 8ad0e31359 (parent 4a0f1c55e9)
3 changed files with 58 additions and 5 deletions
llama_stack/templates/fireworks/report.md (new file, 53 lines)

@@ -0,0 +1,53 @@
|||
# Report for fireworks distribution
|
||||
|
||||
## Supported Models:
|
||||
| Model Descriptor | fireworks |
|
||||
|:---|:---|
|
||||
| Llama-3-8B-Instruct | ❌ |
|
||||
| Llama-3-70B-Instruct | ❌ |
|
||||
| Llama3.1-8B-Instruct | ✅ |
|
||||
| Llama3.1-70B-Instruct | ✅ |
|
||||
| Llama3.1-405B-Instruct | ✅ |
|
||||
| Llama3.1-405B-Instruct | ✅ |
|
||||
| Llama3.1-405B-Instruct | ✅ |
|
||||
| Llama3.2-1B-Instruct | ✅ |
|
||||
| Llama3.2-3B-Instruct | ✅ |
|
||||
| Llama3.2-1B-Instruct | ✅ |
|
||||
| Llama3.2-1B-Instruct | ✅ |
|
||||
| Llama3.2-3B-Instruct | ✅ |
|
||||
| Llama3.2-3B-Instruct | ✅ |
|
||||
| Llama3.2-11B-Vision-Instruct | ✅ |
|
||||
| Llama3.2-90B-Vision-Instruct | ✅ |
|
||||
| Llama3.3-70B-Instruct | ✅ |
|
||||
| Llama-Guard-3-11B-Vision | ✅ |
|
||||
| Llama-Guard-3-1B | ❌ |
|
||||
| Llama-Guard-3-1B | ❌ |
|
||||
| Llama-Guard-3-8B | ✅ |
|
||||
| Llama-Guard-3-8B | ✅ |
|
||||
| Llama-Guard-2-8B | ❌ |
|
||||
|
||||
## Inference:
|
||||
| Model | API | Capability | Test | Status |
|
||||
|:----- |:-----|:-----|:-----|:-----|
|
||||
| Text | /chat_completion | streaming | test_text_chat_completion_streaming | Passed |
|
||||
| Vision | /chat_completion | streaming | test_image_chat_completion_streaming | Passed |
|
||||
| Text | /chat_completion | non_streaming | test_text_chat_completion_non_streaming | Passed |
|
||||
| Vision | /chat_completion | non_streaming | test_image_chat_completion_non_streaming | Passed |
|
||||
| Text | /chat_completion | tool_calling | test_text_chat_completion_with_tool_calling_and_streaming | Passed |
|
||||
| Text | /chat_completion | tool_calling | test_text_chat_completion_with_tool_calling_and_non_streaming | Passed |
|
||||
| Text | /completion | streaming | test_text_completion_streaming | Passed |
|
||||
| Text | /completion | non_streaming | test_text_completion_non_streaming | Passed |
|
||||
| Text | /completion | structured_output | test_text_completion_structured_output | Passed |
|
||||
|
||||
## Memory:
|
||||
| API | Capability | Test | Status |
|
||||
|:-----|:-----|:-----|:-----|
|
||||
| insert and query | inline | test_memory_bank_insert_inline_and_query | Error |
|
||||
| insert and query | url | test_memory_bank_insert_from_url_and_query | Failed |
|
||||
|
||||
## Agent:
|
||||
| API | Capability | Test | Status |
|
||||
|:-----|:-----|:-----|:-----|
|
||||
| create_agent_turn | rag | test_rag_agent | Failed |
|
||||
| create_agent_turn | custom_tool | test_custom_tool | Passed |
|
||||
| create_agent_turn | code_execution | test_code_execution | Failed |
|
|
```diff
@@ -16,15 +16,15 @@ from report import Report

 def pytest_configure(config):
     config.option.tbstyle = "short"
     config.option.disable_warnings = True

-    if config.getoption("--output"):
+    if config.getoption("--report"):
         config.pluginmanager.register(Report())


 def pytest_addoption(parser):
     parser.addoption(
-        "--output",
-        action="store_false",
+        "--report",
+        default=False,
+        action="store_true",
         help="Knob to determine if we should generate report, e.g. --output=True",
     )
```
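For context on this hunk (presumably a pytest `conftest.py`): `pytest_addoption` now registers a plain on/off `--report` switch (`action="store_true"`, `default=False`), and `pytest_configure` registers the `Report` plugin object only when that flag is passed, so the report is produced by running `pytest --report`. Note the help string still mentions `--output=True`, which looks like leftover wording from the old flag. The sketch below is a hypothetical stand-in for such a plugin, not the repository's actual `Report` class; the hook names are standard pytest hooks, but the class name, attributes, and output format are illustrative assumptions.

```python
# Hypothetical sketch of a result-collecting plugin of the kind registered via
# config.pluginmanager.register(...); not the repository's actual Report class.
from collections import defaultdict

import pytest


class MiniReport:
    """Records each test's outcome and prints a small markdown table at the end."""

    def __init__(self):
        self.outcomes = defaultdict(str)

    @pytest.hookimpl
    def pytest_runtest_logreport(self, report):
        # Standard pytest hook: called for the setup/call/teardown phase of every test.
        # Only the "call" phase reflects the test body's pass/fail status.
        if report.when == "call":
            self.outcomes[report.nodeid] = report.outcome

    @pytest.hookimpl
    def pytest_sessionfinish(self, session):
        # Standard pytest hook: runs once after the whole session completes.
        print("| Test | Status |")
        print("|:-----|:-----|")
        for nodeid, outcome in self.outcomes.items():
            print(f"| {nodeid} | {outcome.capitalize()} |")
```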
```diff
@@ -108,7 +108,7 @@ class Report:
         report.append("\n## Supported Models: ")

         header = f"| Model Descriptor | {self.image_name} |"
-        dividor = "|:---|"
+        dividor = "|:---|:---|"

         report.append(header)
         report.append(dividor)
```
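This one-line fix matters because a GitHub-flavored-markdown table only renders when the alignment row has the same number of cells as the header row; with a single `|:---|`, the two-column "Supported Models" header above it would not render as a table. A minimal sketch of the table assembly follows; `"fireworks"` and the sample rows are stand-in values, not taken from the repository's `Report` class.

```python
# Minimal sketch of two-column markdown table assembly; the values are
# illustrative stand-ins, not the repository's actual data.
image_name = "fireworks"
rows = [("Llama3.1-8B-Instruct", "✅"), ("Llama-Guard-2-8B", "❌")]

report = []
report.append(f"| Model Descriptor | {image_name} |")
report.append("|:---|:---|")  # one alignment cell per column, hence the fix
for descriptor, supported in rows:
    report.append(f"| {descriptor} | {supported} |")

print("\n".join(report))
```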