mirror of
https://github.com/meta-llama/llama-stack.git
synced 2025-06-28 19:04:19 +00:00
Remove remote::openai from openai_completion support
Most of their models don't support it anymore as it's seen as legacy. Signed-off-by: Derek Higgins <derekh@redhat.com>
This commit is contained in:
parent
643c0bb747
commit
c4f644a1ea
1 changed file with 1 addition and 0 deletions
|
@@ -37,6 +37,7 @@ def skip_if_model_doesnt_support_openai_completion(client_with_models, model_id)
|
|||
# support both completions and chat completions endpoint and all the Llama models are
|
||||
# just chat completions
|
||||
"remote::nvidia",
|
||||
"remote::openai",
|
||||
"remote::runpod",
|
||||
"remote::sambanova",
|
||||
"remote::tgi",
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue