test: Updated test skips that were marked with "inline::vllm" (#3979)

This should be "remote::vllm". The incorrect value causes some logprobs
tests that should be skipped with remote vLLM to run instead. (They fail
if run.)

Signed-off-by: Derek Higgins <derekh@redhat.com>
This commit is contained in:
Derek Higgins 2025-10-30 13:48:21 +00:00 committed by GitHub
parent 174ef162b3
commit 19d85003de
No known key found for this signature in database
GPG key ID: B5690EEEBB952194

View file

@@ -39,7 +39,7 @@ def skip_if_model_doesnt_support_openai_completion(client_with_models, model_id)
     if provider.provider_type in (
         "inline::meta-reference",
         "inline::sentence-transformers",
-        "inline::vllm",
+        "remote::vllm",
         "remote::bedrock",
         "remote::databricks",
         # Technically Nvidia does support OpenAI completions, but none of their hosted models
@@ -120,7 +120,7 @@ def skip_if_model_doesnt_support_openai_chat_completion(client_with_models, mode
     if provider.provider_type in (
         "inline::meta-reference",
         "inline::sentence-transformers",
-        "inline::vllm",
+        "remote::vllm",
         "remote::bedrock",
         "remote::databricks",
         "remote::cerebras",