rename meta-reference -> basic

Xi Yan 2024-11-11 22:36:43 -05:00
parent 218803b7c8
commit 8dee5faf0d
16 changed files with 11 additions and 10 deletions


@@ -70,18 +70,18 @@ class MetaReferenceScoringImpl(Scoring, ScoringFunctionsProtocolPrivate):
         raise NotImplementedError("Register scoring function not implemented yet")

     async def validate_scoring_input_dataset_schema(self, dataset_id: str) -> None:
-        dataset_def = await self.datasets_api.get_dataset(dataset_identifier=dataset_id)
-        if not dataset_def.dataset_schema or len(dataset_def.dataset_schema) == 0:
+        dataset_def = await self.datasets_api.get_dataset(dataset_id=dataset_id)
+        if not dataset_def.schema or len(dataset_def.schema) == 0:
             raise ValueError(
                 f"Dataset {dataset_id} does not have a schema defined. Please define a schema for the dataset."
             )

         for required_column in ["generated_answer", "expected_answer", "input_query"]:
-            if required_column not in dataset_def.dataset_schema:
+            if required_column not in dataset_def.schema:
                 raise ValueError(
                     f"Dataset {dataset_id} does not have a '{required_column}' column."
                 )
-            if dataset_def.dataset_schema[required_column].type != "string":
+            if dataset_def.schema[required_column].type != "string":
                 raise ValueError(
                     f"Dataset {dataset_id} does not have a '{required_column}' column of type 'string'."
                 )
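
Note: the validator above only accepts datasets whose schema exposes the three scoring columns as string-typed fields. Below is a minimal, self-contained sketch of that rule; DatasetDef and ColumnType are hypothetical stand-ins for the real dataset definitions and are used only to illustrate the check this hunk performs.

from dataclasses import dataclass, field
from typing import Dict

@dataclass
class ColumnType:  # hypothetical stand-in for the real column type objects
    type: str

@dataclass
class DatasetDef:  # hypothetical stand-in for the dataset definition
    identifier: str
    schema: Dict[str, ColumnType] = field(default_factory=dict)

REQUIRED_COLUMNS = ["generated_answer", "expected_answer", "input_query"]

def is_valid_scoring_dataset(dataset_def: DatasetDef) -> bool:
    """Mirror of the schema check in validate_scoring_input_dataset_schema."""
    if not dataset_def.schema:
        return False
    return all(
        col in dataset_def.schema and dataset_def.schema[col].type == "string"
        for col in REQUIRED_COLUMNS
    )

# Example: a dataset definition that would pass the scoring providers' validation.
ok = DatasetDef(
    identifier="my_eval_set",
    schema={
        "input_query": ColumnType("string"),
        "generated_answer": ColumnType("string"),
        "expected_answer": ColumnType("string"),
    },
)
assert is_valid_scoring_dataset(ok)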


@@ -63,18 +63,19 @@ class BraintrustScoringImpl(Scoring, ScoringFunctionsProtocolPrivate):
         )

     async def validate_scoring_input_dataset_schema(self, dataset_id: str) -> None:
-        dataset_def = await self.datasets_api.get_dataset(dataset_identifier=dataset_id)
-        if not dataset_def.dataset_schema or len(dataset_def.dataset_schema) == 0:
+        dataset_def = await self.datasets_api.get_dataset(dataset_id=dataset_id)
+        print(dataset_def)
+        if not dataset_def.schema or len(dataset_def.schema) == 0:
             raise ValueError(
                 f"Dataset {dataset_id} does not have a schema defined. Please define a schema for the dataset."
             )

         for required_column in ["generated_answer", "expected_answer", "input_query"]:
-            if required_column not in dataset_def.dataset_schema:
+            if required_column not in dataset_def.schema:
                 raise ValueError(
                     f"Dataset {dataset_id} does not have a '{required_column}' column."
                 )
-            if dataset_def.dataset_schema[required_column].type != "string":
+            if dataset_def.schema[required_column].type != "string":
                 raise ValueError(
                     f"Dataset {dataset_id} does not have a '{required_column}' column of type 'string'."
                 )
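
The Braintrust provider performs the same schema check, so a caller is expected to validate a dataset before scoring its rows. A rough usage sketch follows, assuming a hypothetical scoring_impl instance; the surrounding score/score_batch flow is not part of this diff.

import asyncio

async def run(scoring_impl, dataset_id: str) -> None:
    # Raises ValueError if the dataset schema is missing one of
    # input_query / generated_answer / expected_answer, or if a
    # required column is not of type "string".
    await scoring_impl.validate_scoring_input_dataset_schema(dataset_id)
    # ... then fetch rows via the datasetio API and score them ...

# asyncio.run(run(scoring_impl, "my_eval_set"))  # scoring_impl is hypothetical here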


@@ -15,8 +15,8 @@ def available_providers() -> List[ProviderSpec]:
             api=Api.scoring,
             provider_type="meta-reference",
             pip_packages=[],
-            module="llama_stack.providers.inline.scoring.meta_reference",
-            config_class="llama_stack.providers.inline.scoring.meta_reference.MetaReferenceScoringConfig",
+            module="llama_stack.providers.inline.scoring.basic",
+            config_class="llama_stack.providers.inline.scoring.basic.MetaReferenceScoringConfig",
             api_dependencies=[
                 Api.datasetio,
                 Api.datasets,
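
The registry entry now points module and config_class at the basic package while the provider_type string stays "meta-reference". The spec only stores dotted paths; a minimal sketch of how such a string could be resolved at load time is below. The stack's real loader is not shown in this diff, so load_provider_config_class is purely illustrative.

import importlib
from typing import Any

def load_provider_config_class(config_class_path: str) -> Any:
    """Resolve a dotted 'package.module.ClassName' string into the class object."""
    module_path, _, class_name = config_class_path.rpartition(".")
    module = importlib.import_module(module_path)
    return getattr(module, class_name)

# After this commit the path reads:
# load_provider_config_class(
#     "llama_stack.providers.inline.scoring.basic.MetaReferenceScoringConfig"
# )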