mirror of https://github.com/meta-llama/llama-stack.git
test: Do not require registration of both text and vision models
Signed-off-by: Yuan Tang <terrytangyuan@gmail.com>
parent 46b0a404e8
commit 2ac97b905e
1 changed file with 11 additions and 5 deletions
@@ -6,7 +6,7 @@
 import os

 import pytest
-from llama_stack_client import LlamaStackClient
+from llama_stack_client import BadRequestError, LlamaStackClient
 from report import Report

 from llama_stack import LlamaStackAsLibraryClient
@@ -120,10 +120,16 @@ def client_with_models(llama_stack_client, text_model_id, vision_model_id, embed
     model_ids = {m.identifier for m in client.models.list()}
     model_ids.update(m.provider_resource_id for m in client.models.list())

-    if text_model_id and text_model_id not in model_ids:
-        client.models.register(model_id=text_model_id, provider_id=inference_providers[0])
-    if vision_model_id and vision_model_id not in model_ids:
-        client.models.register(model_id=vision_model_id, provider_id=inference_providers[0])
+    try:
+        if text_model_id and text_model_id not in model_ids:
+            client.models.register(model_id=text_model_id, provider_id=inference_providers[0])
+    except BadRequestError:
+        pass
+    try:
+        if vision_model_id and vision_model_id not in model_ids:
+            client.models.register(model_id=vision_model_id, provider_id=inference_providers[0])
+    except BadRequestError:
+        pass

     if embedding_model_id and embedding_dimension and embedding_model_id not in model_ids:
         # try to find a provider that supports embeddings, if sentence-transformers is not available
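For context, the change makes model registration in the shared client_with_models fixture best-effort: each model is registered only if it is not already known to the client, and a BadRequestError from the provider no longer aborts the whole test session. Below is a minimal, self-contained sketch of that pattern; the ensure_model_registered helper and the Fake* classes are hypothetical stand-ins written only to illustrate the control flow, not part of llama-stack or llama_stack_client.

class BadRequestError(Exception):
    """Stand-in for llama_stack_client.BadRequestError."""


class FakeModelsAPI:
    def register(self, model_id, provider_id):
        # Pretend this provider rejects vision models, as a registration might
        # in a real deployment.
        if "vision" in model_id.lower():
            raise BadRequestError(f"{provider_id} cannot serve {model_id}")


class FakeClient:
    # Hypothetical stand-in for LlamaStackClient, exposing only .models.register.
    models = FakeModelsAPI()


def ensure_model_registered(client, model_id, provider_id, known_model_ids):
    # Skip registration when the id is empty or already registered.
    if not model_id or model_id in known_model_ids:
        return
    try:
        client.models.register(model_id=model_id, provider_id=provider_id)
    except BadRequestError:
        # Registration is best-effort: tests that actually need this model can
        # still skip or fail individually, but the fixture no longer requires
        # both the text and the vision model to register successfully.
        pass


client = FakeClient()
ensure_model_registered(client, "example-vision-model", "example-provider", set())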