Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-12-04 02:03:44 +00:00)
feat: add oci genai service as chat inference provider
parent 6147321083
commit 76d615d6d1
15 changed files with 938 additions and 0 deletions
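For orientation, once a stack is configured with the new remote::oci inference provider, chat requests reach it through llama-stack's OpenAI-compatible surface like any other remote provider. A minimal usage sketch, assuming a locally running server on the default port with its OpenAI-compatible API mounted at /v1/openai/v1 and an OCI-served chat model already registered; the base URL path and model id are assumptions, not taken from this commit:

from openai import OpenAI

# Assumed local llama-stack server exposing its OpenAI-compatible API;
# adjust base_url to your deployment. The model id is a placeholder.
client = OpenAI(base_url="http://localhost:8321/v1/openai/v1", api_key="not-needed")

response = client.chat.completions.create(
    model="oci/example-chat-model",  # hypothetical model served by the remote::oci provider
    messages=[{"role": "user", "content": "Say hello from OCI GenAI."}],
)
print(response.choices[0].message.content)

Going through the OpenAI-compatible surface keeps client code identical regardless of which remote provider (runpod, sambanova, tgi, or now oci) actually serves the model.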
@@ -138,6 +138,7 @@ def skip_if_model_doesnt_support_openai_embeddings(client, model_id):
         "remote::runpod",
         "remote::sambanova",
         "remote::tgi",
+        "remote::oci",
     ):
         pytest.skip(f"Model {model_id} hosted by {provider.provider_type} doesn't support OpenAI embeddings.")
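The hunk shows only the tail of the helper it modifies. A sketch of how the full check plausibly reads, with provider_from_model as a hypothetical stand-in for however the test suite resolves a model's provider:

import pytest


def provider_from_model(client, model_id):
    # Hypothetical helper: look up the provider that serves the given model.
    model = next(m for m in client.models.list() if m.identifier == model_id)
    return next(p for p in client.providers.list() if p.provider_id == model.provider_id)


def skip_if_model_doesnt_support_openai_embeddings(client, model_id):
    provider = provider_from_model(client, model_id)
    if provider.provider_type in (
        "remote::runpod",
        "remote::sambanova",
        "remote::tgi",
        "remote::oci",  # the provider type this commit introduces
    ):
        pytest.skip(f"Model {model_id} hosted by {provider.provider_type} doesn't support OpenAI embeddings.")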