Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-06-29 03:14:19 +00:00)
chore: Add OpenAI compatibility for Ollama embeddings
Signed-off-by: Francisco Javier Arceo <farceo@redhat.com>
parent fef670b024
commit 15042c6c31
2 changed files with 39 additions and 3 deletions
@@ -51,7 +51,6 @@ def skip_if_model_doesnt_support_openai_embeddings(client, model_id):
         "remote::runpod",
         "remote::sambanova",
         "remote::tgi",
-        "remote::ollama",
     ):
         pytest.skip(f"Model {model_id} hosted by {provider.provider_type} doesn't support OpenAI embeddings.")
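With "remote::ollama" removed from the skip list above, the OpenAI-embeddings integration tests now run against the Ollama provider as well. As a rough illustration of the call path this enables (not code from this commit), the sketch below uses the OpenAI Python client against a Llama Stack server's OpenAI-compatible endpoint; the base URL, API key, and model id are assumptions for illustration.

    from openai import OpenAI

    # Assumed local Llama Stack deployment exposing an OpenAI-compatible API;
    # the URL, key, and model id are illustrative placeholders.
    client = OpenAI(
        base_url="http://localhost:8321/v1/openai/v1",
        api_key="none",  # local server; the key is typically unused
    )

    # Request embeddings from an Ollama-served model through the
    # OpenAI-compatible embeddings endpoint.
    response = client.embeddings.create(
        model="all-minilm:latest",  # hypothetical Ollama embedding model id
        input=["Hello, world!"],
    )
    print(len(response.data[0].embedding))  # dimensionality of the returned vector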