Refactor some code into openai_compat

Signed-off-by: Francisco Javier Arceo <farceo@redhat.com>
This commit is contained in:
Francisco Javier Arceo 2025-06-13 12:06:58 -04:00
parent 89d4a05303
commit e0f1788e9e
4 changed files with 95 additions and 40 deletions

View file

@ -34,7 +34,11 @@ def skip_if_model_doesnt_support_variable_dimensions(model_id):
pytest.skip("{model_id} does not support variable output embedding dimensions")
@pytest.fixture(params=["openai_client", "llama_stack_client"])
@pytest.fixture(
params=[
"openai_client",
]
)
def compat_client(request, client_with_models):
if request.param == "openai_client" and isinstance(client_with_models, LlamaStackAsLibraryClient):
pytest.skip("OpenAI client tests not supported with library client")
@ -55,12 +59,6 @@ def skip_if_model_doesnt_support_openai_embeddings(client, model_id):
pytest.skip(f"Model {model_id} hosted by {provider.provider_type} doesn't support OpenAI embeddings.")
def skip_if_client_doesnt_support_base64_encoding(client, model_id):
    """Skip the current test when the model's provider cannot return base64-encoded embeddings.

    Looks up the provider backing *model_id* and skips if its provider_type is
    one of the known-unsupported backends.
    """
    # Providers known not to support base64 encoding_format for embeddings.
    unsupported = {"remote::ollama"}
    provider = provider_from_model(client, model_id)
    if provider.provider_type in unsupported:
        pytest.skip(f"Client {client} doesn't support base64 encoding for embeddings.")
@pytest.fixture
def openai_client(client_with_models):
base_url = f"{client_with_models.base_url}/v1/openai/v1"
@ -253,7 +251,6 @@ def test_openai_embeddings_with_encoding_format_base64(compat_client, client_wit
def test_openai_embeddings_base64_batch_processing(compat_client, client_with_models, embedding_model_id):
"""Test OpenAI embeddings endpoint with base64 encoding for batch processing."""
skip_if_model_doesnt_support_openai_embeddings(client_with_models, embedding_model_id)
skip_if_client_doesnt_support_base64_encoding(client_with_models, embedding_model_id)
input_texts = ["First text for base64", "Second text for base64", "Third text for base64"]