chore: inference=remote::llama-openai-compat does not support /v1/completions

This commit is contained in:
Matthew Farrellee 2025-10-04 12:47:23 -04:00
parent f176196fba
commit 05afe923b8
2 changed files with 29 additions and 0 deletions

View file

@ -59,6 +59,7 @@ def skip_if_model_doesnt_support_openai_completion(client_with_models, model_id)
# again. You can learn more about which models can be used with each operation here:
# https://go.microsoft.com/fwlink/?linkid=2197993.'}}"}
"remote::watsonx", # return 404 when hitting the /openai/v1 endpoint
"remote::llama-openai-compat",
):
pytest.skip(f"Model {model_id} hosted by {provider.provider_type} doesn't support OpenAI completions.")