Remove remote::openai from openai_completion support

Most of their models don't support it any more, as it's
seen as legacy.

Signed-off-by: Derek Higgins <derekh@redhat.com>
Derek Higgins 2025-06-04 09:08:30 +01:00
parent 643c0bb747
commit c4f644a1ea


@@ -37,6 +37,7 @@ def skip_if_model_doesnt_support_openai_completion(client_with_models, model_id)
# support both completions and chat completions endpoint and all the Llama models are
# just chat completions
"remote::nvidia",
"remote::openai",
"remote::runpod",
"remote::sambanova",
"remote::tgi",