From c4f644a1ea9819c4df04595f2260617f4177caf8 Mon Sep 17 00:00:00 2001 From: Derek Higgins Date: Wed, 4 Jun 2025 09:08:30 +0100 Subject: [PATCH] Remove remote::openai from openai_completion support Most of their models don't support it any more as it's seen as legacy. Signed-off-by: Derek Higgins --- tests/integration/inference/test_openai_completion.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/integration/inference/test_openai_completion.py b/tests/integration/inference/test_openai_completion.py index 190840f70..11bc5bef6 100644 --- a/tests/integration/inference/test_openai_completion.py +++ b/tests/integration/inference/test_openai_completion.py @@ -37,6 +37,7 @@ def skip_if_model_doesnt_support_openai_completion(client_with_models, model_id) # support both completions and chat completions endpoint and all the Llama models are # just chat completions "remote::nvidia", + "remote::openai", "remote::runpod", "remote::sambanova", "remote::tgi",