[#391] Add support for json structured output for vLLM

This commit is contained in:
Aidan Do 2024-11-26 09:40:17 +00:00
parent 4e6c984c26
commit 1801aa145d
2 changed files with 13 additions and 0 deletions

View file

@@ -139,6 +139,7 @@ class TestInference:
"remote::tgi",
"remote::together",
"remote::fireworks",
"remote::vllm",
):
pytest.skip(
"Other inference providers don't support structured output in completions yet"
@@ -198,6 +199,7 @@ class TestInference:
"remote::fireworks",
"remote::tgi",
"remote::together",
"remote::vllm",
"remote::nvidia",
):
pytest.skip("Other inference providers don't support structured output yet")