llama-stack/llama_stack/providers/tests/scoring/fixtures.py

# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.

import pytest
import pytest_asyncio

from llama_stack.distribution.datatypes import Api, Provider
from llama_stack.providers.tests.resolver import resolve_impls_for_test_v2

from ..conftest import ProviderFixture, remote_stack_fixture
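

# Provider fixtures for the scoring API. Each returns a ProviderFixture
# describing one provider configuration; scoring_remote points the tests at an
# already-running remote stack instead of an inline provider.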
@pytest.fixture(scope="session")
def scoring_remote() -> ProviderFixture:
    return remote_stack_fixture()


@pytest.fixture(scope="session")
def scoring_basic() -> ProviderFixture:
    return ProviderFixture(
        providers=[
            Provider(
                provider_id="basic",
                provider_type="inline::basic",
                config={},
            )
        ],
    )


@pytest.fixture(scope="session")
def scoring_braintrust() -> ProviderFixture:
    return ProviderFixture(
        providers=[
            Provider(
                provider_id="braintrust",
                provider_type="inline::braintrust",
                config={},
            )
        ],
    )


@pytest.fixture(scope="session")
def scoring_llm_as_judge() -> ProviderFixture:
    return ProviderFixture(
        providers=[
            Provider(
                provider_id="llm-as-judge",
                provider_type="inline::llm-as-judge",
                config={},
            )
        ],
    )
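

# Fixture-name suffixes the conftest parametrization uses to pick a
# scoring_<name> fixture for each test run.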
SCORING_FIXTURES = ["basic", "remote", "braintrust", "llm_as_judge"]
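

# Assembles the scoring test stack: resolves datasetio, scoring, and inference
# implementations for the selected fixture combination, then registers the
# models the scoring tests expect to be available.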
@pytest_asyncio.fixture(scope="session")
async def scoring_stack(request, inference_model):
    fixture_dict = request.param

    providers = {}
    provider_data = {}
    for key in ["datasetio", "scoring", "inference"]:
        fixture = request.getfixturevalue(f"{key}_{fixture_dict[key]}")
        providers[key] = fixture.providers
        if fixture.provider_data:
            provider_data.update(fixture.provider_data)

    impls = await resolve_impls_for_test_v2(
        [Api.scoring, Api.datasetio, Api.inference],
        providers,
        provider_data,
    )

    provider_id = providers["inference"][0].provider_id
    await impls[Api.models].register_model(
        model_id=inference_model,
        provider_id=provider_id,
    )
    # Additional models that some scoring functions (e.g. llm-as-judge judge
    # models) expect to be registered.
    await impls[Api.models].register_model(
        model_id="Llama3.1-405B-Instruct",
        provider_id=provider_id,
    )
    await impls[Api.models].register_model(
        model_id="Llama3.1-8B-Instruct",
        provider_id=provider_id,
    )

    return impls