# Mirror of https://github.com/meta-llama/llama-stack.git
# Synced 2025-12-31 16:00:04 +00:00
# (24 lines, 592 B, YAML)
---
# Test-matrix configuration: which test files/functions to run, against which
# inference backends, using which models.

# Test files and the specific test functions within each to execute.
tests:
  - path: inference/test_vision_inference.py
    functions:
      - test_vision_chat_completion_streaming
      - test_vision_chat_completion_non_streaming

  - path: inference/test_text_inference.py
    functions:
      - test_structured_output
      - test_chat_completion_streaming
      - test_chat_completion_non_streaming
      - test_chat_completion_with_tool_calling
      - test_chat_completion_with_tool_calling_streaming

# Inference provider fixtures to run the tests against.
# NOTE(review): presumably these map to pytest fixture names — confirm
# against the consuming test harness.
inference_fixtures:
  - ollama
  - fireworks
  - together
  - tgi
  - vllm_remote

# Model identifiers used by the tests, keyed by modality.
test_models:
  text: meta-llama/Llama-3.1-8B-Instruct
  vision: meta-llama/Llama-3.2-11B-Vision-Instruct