---
# CI test configuration: which inference providers to exercise, which model
# to use for each modality, and which test functions to run from each test
# module.
# NOTE(review): the source file had lost all line breaks; the nesting below
# is reconstructed from key/value ordering — confirm against the consumer.

# Inference provider backends the test suite is run against.
inference_providers:
  - ollama
  - fireworks
  - together
  - tgi
  - vllm

# Model identifiers used by the tests, keyed by modality.
test_models:
  text: meta-llama/Llama-3.1-8B-Instruct
  vision: meta-llama/Llama-3.2-11B-Vision-Instruct

# Test functions to run, keyed by the test module that defines them.
inference/test_vision_inference.py:
  - test_vision_chat_completion_streaming
  - test_vision_chat_completion_non_streaming

inference/test_text_inference.py:
  - test_structured_output
  - test_chat_completion_streaming
  - test_chat_completion_non_streaming
  - test_chat_completion_with_tool_calling
  - test_chat_completion_with_tool_calling_streaming