Mirror of https://github.com/meta-llama/llama-stack.git (synced 2026-01-03 03:12:15 +00:00)
[test automation] add support for test config

This commit is contained in:
  parent 4d3f0ae79b
  commit 702cf2d563

3 changed files with 66 additions and 15 deletions
llama_stack/providers/tests/ci_test_config.yaml (new file, 24 additions)

@@ -0,0 +1,24 @@
tests:
  - path: inference/test_vision_inference.py
    functions:
      - test_vision_chat_completion_streaming
      - test_vision_chat_completion_non_streaming

  - path: inference/test_text_inference.py
    functions:
      - test_structured_output
      - test_chat_completion_streaming
      - test_chat_completion_non_streaming
      - test_chat_completion_with_tool_calling
      - test_chat_completion_with_tool_calling_streaming

inference_fixtures:
  - ollama
  - fireworks
  - together
  - tgi
  - vllm_remote

test_models:
  text: meta-llama/Llama-3.1-8B-Instruct
  vision: meta-llama/Llama-3.2-11B-Vision-Instruct
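For illustration only, here is a minimal sketch of how a CI harness might consume a config like this. The loader name and the use of pytest node IDs are assumptions for the example, not the actual llama-stack test runner; the config simply lists test files, the functions to run in each, the inference fixtures to parametrize over, and the text/vision models to use.

    # hypothetical_loader.py -- illustrative sketch, not the llama-stack implementation
    # Reads ci_test_config.yaml and prints pytest node IDs (<file>::<function>)
    # for every configured test, which a CI job could then pass to pytest.
    import yaml  # requires pyyaml

    def load_test_node_ids(config_path: str) -> list[str]:
        with open(config_path) as f:
            cfg = yaml.safe_load(f)

        node_ids = []
        for entry in cfg["tests"]:
            for func in entry["functions"]:
                # pytest addresses individual tests as <path>::<test function>
                node_ids.append(f"{entry['path']}::{func}")
        return node_ids

    if __name__ == "__main__":
        config = "llama_stack/providers/tests/ci_test_config.yaml"
        for node_id in load_test_node_ids(config):
            print(node_id)

A runner built along these lines would typically also iterate over inference_fixtures and test_models to parametrize each selected test, but how that mapping is done is left to the actual implementation.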