From 9ae7c3ad24d9fd3d44a06d651c1fa920d09b98ab Mon Sep 17 00:00:00 2001
From: Sixian Yi
Date: Fri, 17 Jan 2025 16:39:48 -0800
Subject: [PATCH] add tgi

---
 llama_stack/providers/tests/test_report.md | 32 ++++++++++++++++++++++
 1 file changed, 32 insertions(+)

diff --git a/llama_stack/providers/tests/test_report.md b/llama_stack/providers/tests/test_report.md
index f7fb40411..4767cc3db 100644
--- a/llama_stack/providers/tests/test_report.md
+++ b/llama_stack/providers/tests/test_report.md
@@ -37,3 +37,35 @@
 | llama_stack/providers/tests/inference/test_text_inference.py | TestInference.test_chat_completion_with_tool_calling | 1 | 1 |
 | llama_stack/providers/tests/inference/test_text_inference.py | TestInference.test_chat_completion_with_tool_calling_streaming | 1 | 1 |
 | TOTAL | | 6 | 6 |
+
+
+### tgi
+
+| filepath | function | passed | skipped | SUBTOTAL |
+| ------------------------------------------------ | -------------------------------------------------------------- | -----: | ------: | -------: |
+| providers/tests/inference/test_text_inference.py | TestInference.test_model_list | 1 | 0 | 1 |
+| providers/tests/inference/test_text_inference.py | TestInference.test_completion | 1 | 0 | 1 |
+| providers/tests/inference/test_text_inference.py | TestInference.test_chat_completion_non_streaming | 1 | 0 | 1 |
+| providers/tests/inference/test_text_inference.py | TestInference.test_structured_output | 1 | 0 | 1 |
+| providers/tests/inference/test_text_inference.py | TestInference.test_chat_completion_streaming | 1 | 0 | 1 |
+| providers/tests/inference/test_text_inference.py | TestInference.test_chat_completion_with_tool_calling | 1 | 0 | 1 |
+| providers/tests/inference/test_text_inference.py | TestInference.test_chat_completion_with_tool_calling_streaming | 1 | 0 | 1 |
+| providers/tests/inference/test_text_inference.py | TestInference.test_completion_logprobs | 0 | 1 | 1 |
+| providers/tests/inference/test_text_inference.py | TestInference.test_completion_structured_output | 0 | 1 | 1 |
+| TOTAL | | 7 | 2 | 9 |
+
+
+### vLLM
+
+| filepath | function | passed | skipped | SUBTOTAL |
+| ------------------------------------------------------------ | -------------------------------------------------------------- | -----: | ------: | -------: |
+| llama_stack/providers/tests/inference/test_text_inference.py | TestInference.test_model_list | 1 | 0 | 1 |
+| llama_stack/providers/tests/inference/test_text_inference.py | TestInference.test_chat_completion_non_streaming | 1 | 0 | 1 |
+| llama_stack/providers/tests/inference/test_text_inference.py | TestInference.test_structured_output | 1 | 0 | 1 |
+| llama_stack/providers/tests/inference/test_text_inference.py | TestInference.test_chat_completion_streaming | 1 | 0 | 1 |
+| llama_stack/providers/tests/inference/test_text_inference.py | TestInference.test_chat_completion_with_tool_calling | 1 | 0 | 1 |
+| llama_stack/providers/tests/inference/test_text_inference.py | TestInference.test_chat_completion_with_tool_calling_streaming | 1 | 0 | 1 |
+| llama_stack/providers/tests/inference/test_text_inference.py | TestInference.test_completion | 0 | 1 | 1 |
+| llama_stack/providers/tests/inference/test_text_inference.py | TestInference.test_completion_logprobs | 0 | 1 | 1 |
+| llama_stack/providers/tests/inference/test_text_inference.py | TestInference.test_completion_structured_output | 0 | 1 | 1 |
+| TOTAL | | 6 | 3 | 9 |