test: Updated test skips that were marked with "inline::vllm"

These skips should reference "remote::vllm" instead. With the incorrect
"inline::vllm" marker, some log-probs tests were not skipped when running
against remote vLLM, and they fail if run.

Signed-off-by: Derek Higgins <derekh@redhat.com>
This commit is contained in:
Derek Higgins 2025-10-30 10:50:19 +00:00
parent b90c6a2c8b
commit 59ab191ac7

View file

@@ -39,7 +39,7 @@ def skip_if_model_doesnt_support_openai_completion(client_with_models, model_id)
if provider.provider_type in (
"inline::meta-reference",
"inline::sentence-transformers",
"inline::vllm",
"remote::vllm",
"remote::bedrock",
"remote::databricks",
# Technically Nvidia does support OpenAI completions, but none of their hosted models
@@ -120,7 +120,7 @@ def skip_if_model_doesnt_support_openai_chat_completion(client_with_models, mode
if provider.provider_type in (
"inline::meta-reference",
"inline::sentence-transformers",
"inline::vllm",
"remote::vllm",
"remote::bedrock",
"remote::databricks",
"remote::cerebras",