# Source: llama-stack-mirror/llama_stack/providers/tests/ci_test_config.yaml
# Last modified: 2025-01-15 16:17:27 -08:00
# (24 lines, 592 B, YAML)
---
# CI test configuration for the inference provider test suite.
#
# tests:              test files and the specific pytest functions to run
#                     from each one.
# inference_fixtures: provider fixtures the selected tests are run against.
# test_models:        model identifiers used for text and vision tests.

# Each entry pairs a test file (relative to the providers/tests directory)
# with the functions to execute from it.
tests:
  - path: inference/test_vision_inference.py
    functions:
      - test_vision_chat_completion_streaming
      - test_vision_chat_completion_non_streaming

  - path: inference/test_text_inference.py
    functions:
      - test_structured_output
      - test_chat_completion_streaming
      - test_chat_completion_non_streaming
      - test_chat_completion_with_tool_calling
      - test_chat_completion_with_tool_calling_streaming

# Inference provider fixtures to parameterize the tests over.
inference_fixtures:
  - ollama
  - fireworks
  - together
  - tgi
  - vllm_remote

# Models used by the test functions above.
test_models:
  text: meta-llama/Llama-3.1-8B-Instruct
  vision: meta-llama/Llama-3.2-11B-Vision-Instruct