Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-12-17 12:22:36 +00:00)
[#391] Add support for json structured output for vLLM
parent 4e6c984c26
commit 1801aa145d
2 changed files with 13 additions and 0 deletions
@@ -139,6 +139,7 @@ class TestInference:
             "remote::tgi",
             "remote::together",
             "remote::fireworks",
+            "remote::vllm",
         ):
             pytest.skip(
                 "Other inference providers don't support structured output in completions yet"
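
This hunk sits inside a provider gate at the top of the structured-output completion test: the test is skipped unless the active provider is on the allow-list of providers known to support JSON-constrained completions, and this commit adds "remote::vllm" to that list. A minimal standalone sketch of the same gating pattern follows; the helper name skip_if_unsupported and the constant are illustrative, not taken from the diff:

import pytest

# Providers expected to pass the structured-output completion test;
# "remote::vllm" is the entry added by this commit.
STRUCTURED_OUTPUT_PROVIDERS = (
    "remote::tgi",
    "remote::together",
    "remote::fireworks",
    "remote::vllm",
)

def skip_if_unsupported(provider_type: str) -> None:
    # Skip the current test for providers without structured-output support.
    if provider_type not in STRUCTURED_OUTPUT_PROVIDERS:
        pytest.skip(
            "Other inference providers don't support structured output in completions yet"
        )
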
@@ -198,6 +199,7 @@ class TestInference:
             "remote::fireworks",
             "remote::tgi",
             "remote::together",
+            "remote::vllm",
             "remote::nvidia",
         ):
             pytest.skip("Other inference providers don't support structured output yet")
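
The second hunk applies the same allow-list change to the other structured-output test. On the wire, these tests amount to asking the provider for output constrained by a JSON schema; the remote::vllm provider talks to a vLLM OpenAI-compatible server, which exposes schema-guided decoding through its guided_json extra-body parameter. A minimal sketch of an equivalent standalone request is below; the endpoint URL, model name, and schema are placeholders, and none of this is the adapter code from the commit:

from openai import OpenAI

# Point the OpenAI client at a locally running vLLM server (placeholder URL).
client = OpenAI(base_url="http://localhost:8000/v1", api_key="not-needed")

# JSON schema the completion must conform to (placeholder schema).
answer_schema = {
    "type": "object",
    "properties": {
        "name": {"type": "string"},
        "year_born": {"type": "integer"},
    },
    "required": ["name", "year_born"],
}

response = client.chat.completions.create(
    model="meta-llama/Llama-3.1-8B-Instruct",  # placeholder model name
    messages=[{"role": "user", "content": "Who wrote Moby-Dick? Answer in JSON."}],
    extra_body={"guided_json": answer_schema},  # vLLM schema-guided decoding
)
print(response.choices[0].message.content)

Presumably the adapter change in this commit performs an equivalent translation from the inference API's JSON response format into vLLM's guided-decoding options.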