migrate dataset to resource

This commit is contained in:
Dinesh Yeduguru 2024-11-11 12:15:41 -08:00
parent 38cce97597
commit f8e39ae873
7 changed files with 70 additions and 51 deletions

View file

@@ -11,7 +11,7 @@ from urllib.parse import urlparse
from llama_models.schema_utils import json_schema_type
from pydantic import BaseModel, Field
from llama_stack.apis.datasets import DatasetDef
from llama_stack.apis.datasets import Dataset
from llama_stack.apis.eval_tasks import EvalTaskDef
from llama_stack.apis.memory_banks.memory_banks import MemoryBank
from llama_stack.apis.models import Model
@@ -57,9 +57,9 @@ class MemoryBanksProtocolPrivate(Protocol):
class DatasetsProtocolPrivate(Protocol):
async def list_datasets(self) -> List[DatasetDef]: ...
async def list_datasets(self) -> List[Dataset]: ...
async def register_dataset(self, dataset_def: DatasetDef) -> None: ...
async def register_dataset(self, dataset: Dataset) -> None: ...
class ScoringFunctionsProtocolPrivate(Protocol):

View file

@@ -37,12 +37,12 @@ class BaseDataset(ABC):
@dataclass
class DatasetInfo:
dataset_def: DatasetDef
dataset_def: Dataset
dataset_impl: BaseDataset
class PandasDataframeDataset(BaseDataset):
def __init__(self, dataset_def: DatasetDef, *args, **kwargs) -> None:
def __init__(self, dataset_def: Dataset, *args, **kwargs) -> None:
super().__init__(*args, **kwargs)
self.dataset_def = dataset_def
self.df = None
@@ -60,9 +60,9 @@ class PandasDataframeDataset(BaseDataset):
def _validate_dataset_schema(self, df) -> pandas.DataFrame:
# note that we will drop any columns in dataset that are not in the schema
df = df[self.dataset_def.dataset_schema.keys()]
df = df[self.dataset_def.schema.keys()]
# check all columns in dataset schema are present
assert len(df.columns) == len(self.dataset_def.dataset_schema)
assert len(df.columns) == len(self.dataset_def.schema)
# TODO: type checking against column types in dataset schema
return df
@@ -89,15 +89,15 @@ class LocalFSDatasetIOImpl(DatasetIO, DatasetsProtocolPrivate):
async def register_dataset(
self,
dataset_def: DatasetDef,
dataset: Dataset,
) -> None:
dataset_impl = PandasDataframeDataset(dataset_def)
self.dataset_infos[dataset_def.identifier] = DatasetInfo(
dataset_def=dataset_def,
dataset_impl = PandasDataframeDataset(dataset)
self.dataset_infos[dataset.identifier] = DatasetInfo(
dataset_def=dataset,
dataset_impl=dataset_impl,
)
async def list_datasets(self) -> List[DatasetDef]:
async def list_datasets(self) -> List[Dataset]:
return [i.dataset_def for i in self.dataset_infos.values()]
async def get_rows_paginated(

View file

@@ -55,15 +55,11 @@ async def register_dataset(
"generated_answer": StringType(),
}
dataset = DatasetDefWithProvider(
identifier=dataset_id,
provider_id="",
url=URL(
uri=test_url,
),
dataset_schema=dataset_schema,
await datasets_impl.register_dataset(
dataset_id=dataset_id,
schema=dataset_schema,
url=URL(uri=test_url),
)
await datasets_impl.register_dataset(dataset)
class TestDatasetIO: