address comments

This commit is contained in:
Xi Yan 2024-10-22 14:13:00 -07:00
parent 95aef814af
commit bd39b22717
7 changed files with 25 additions and 45 deletions

View file

@@ -17,7 +17,7 @@ class PaginatedRowsResult(BaseModel):
# the rows obey the DatasetSchema for the given dataset # the rows obey the DatasetSchema for the given dataset
rows: List[Dict[str, Any]] rows: List[Dict[str, Any]]
total_count: int total_count: int
next_page_token: Optional[int] = None next_page_token: Optional[str] = None
class DatasetStore(Protocol): class DatasetStore(Protocol):
@@ -34,6 +34,6 @@ class DatasetIO(Protocol):
self, self,
dataset_id: str, dataset_id: str,
rows_in_page: int, rows_in_page: int,
page_token: Optional[int] = None, page_token: Optional[str] = None,
filter_condition: Optional[str] = None, filter_condition: Optional[str] = None,
) -> PaginatedRowsResult: ... ) -> PaginatedRowsResult: ...

View file

@@ -50,11 +50,5 @@ class Datasets(Protocol):
dataset_identifier: str, dataset_identifier: str,
) -> Optional[DatasetDefWithProvider]: ... ) -> Optional[DatasetDefWithProvider]: ...
@webmethod(route="/datasets/delete")
async def delete_dataset(
self,
dataset_identifier: str,
) -> None: ...
@webmethod(route="/datasets/list", method="GET") @webmethod(route="/datasets/list", method="GET")
async def list_datasets(self) -> List[DatasetDefWithProvider]: ... async def list_datasets(self) -> List[DatasetDefWithProvider]: ...

View file

@@ -222,7 +222,3 @@ class DatasetsRoutingTable(CommonRoutingTableImpl, Datasets):
async def register_dataset(self, dataset_def: DatasetDefWithProvider) -> None: async def register_dataset(self, dataset_def: DatasetDefWithProvider) -> None:
await self.register_object(dataset_def) await self.register_object(dataset_def)
async def delete_dataset(self, dataset_identifier: str) -> None:
# TODO: pass through for now
return

View file

@@ -10,32 +10,21 @@ import pandas
from llama_models.llama3.api.datatypes import * # noqa: F403 from llama_models.llama3.api.datatypes import * # noqa: F403
from llama_stack.apis.datasetio import * # noqa: F403 from llama_stack.apis.datasetio import * # noqa: F403
from dataclasses import dataclass
from llama_stack.providers.datatypes import DatasetsProtocolPrivate from llama_stack.providers.datatypes import DatasetsProtocolPrivate
from llama_stack.providers.utils.datasetio.dataset_utils import BaseDataset from llama_stack.providers.utils.datasetio.dataset_utils import BaseDataset, DatasetInfo
from .config import MetaReferenceDatasetIOConfig from .config import MetaReferenceDatasetIOConfig
@dataclass class PandasDataframeDataset(BaseDataset):
class DatasetInfo:
dataset_def: DatasetDef
dataset_impl: BaseDataset
next_page_token: Optional[int] = None
class CustomDataset(BaseDataset):
def __init__(self, dataset_def: DatasetDef, *args, **kwargs) -> None: def __init__(self, dataset_def: DatasetDef, *args, **kwargs) -> None:
super().__init__(*args, **kwargs) super().__init__(*args, **kwargs)
self.dataset_def = dataset_def self.dataset_def = dataset_def
# TODO: validate dataset_def against schema
self.df = None self.df = None
self.load()
def __len__(self) -> int: def __len__(self) -> int:
if self.df is None: if self.df is None:
self.load() raise ValueError("Dataset not loaded. Please call .load() first")
return len(self.df) return len(self.df)
def __getitem__(self, idx): def __getitem__(self, idx):
@@ -91,10 +80,11 @@ class MetaReferenceDatasetioImpl(DatasetIO, DatasetsProtocolPrivate):
self, self,
dataset_def: DatasetDef, dataset_def: DatasetDef,
) -> None: ) -> None:
dataset_impl = PandasDataframeDataset(dataset_def)
dataset_impl.load()
self.dataset_infos[dataset_def.identifier] = DatasetInfo( self.dataset_infos[dataset_def.identifier] = DatasetInfo(
dataset_def=dataset_def, dataset_def=dataset_def,
dataset_impl=CustomDataset(dataset_def), dataset_impl=dataset_impl,
next_page_token=0,
) )
async def list_datasets(self) -> List[DatasetDef]: async def list_datasets(self) -> List[DatasetDef]:
@@ -104,23 +94,24 @@ class MetaReferenceDatasetioImpl(DatasetIO, DatasetsProtocolPrivate):
self, self,
dataset_id: str, dataset_id: str,
rows_in_page: int, rows_in_page: int,
page_token: Optional[int] = None, page_token: Optional[str] = None,
filter_condition: Optional[str] = None, filter_condition: Optional[str] = None,
) -> PaginatedRowsResult: ) -> PaginatedRowsResult:
dataset_info = self.dataset_infos.get(dataset_id) dataset_info = self.dataset_infos.get(dataset_id)
if page_token is None: if page_token is None:
dataset_info.next_page_token = 0 next_page_token = 0
else:
next_page_token = int(page_token)
if rows_in_page == -1: if rows_in_page == -1:
rows = dataset_info.dataset_impl[dataset_info.next_page_token :] rows = dataset_info.dataset_impl[next_page_token:]
start = dataset_info.next_page_token start = next_page_token
end = min(start + rows_in_page, len(dataset_info.dataset_impl)) end = min(start + rows_in_page, len(dataset_info.dataset_impl))
rows = dataset_info.dataset_impl[start:end] rows = dataset_info.dataset_impl[start:end]
dataset_info.next_page_token = end
return PaginatedRowsResult( return PaginatedRowsResult(
rows=rows, rows=rows,
total_count=len(rows), total_count=len(rows),
next_page_token=dataset_info.next_page_token, next_page_token=str(end),
) )

View file

@@ -19,13 +19,4 @@ def available_providers() -> List[ProviderSpec]:
config_class="llama_stack.providers.impls.meta_reference.datasetio.MetaReferenceDatasetIOConfig", config_class="llama_stack.providers.impls.meta_reference.datasetio.MetaReferenceDatasetIOConfig",
api_dependencies=[], api_dependencies=[],
), ),
remote_provider_spec(
api=Api.datasetio,
adapter=AdapterSpec(
adapter_type="sample",
pip_packages=[],
module="llama_stack.providers.adapters.datasetio.sample",
config_class="llama_stack.providers.adapters.datasetio.sample.SampleConfig",
),
),
] ]

View file

@@ -95,7 +95,7 @@ async def test_get_rows_paginated(datasetio_settings):
assert isinstance(response.rows, list) assert isinstance(response.rows, list)
assert len(response.rows) == 3 assert len(response.rows) == 3
assert response.next_page_token == 3 assert response.next_page_token == "3"
# iterate over all rows # iterate over all rows
response = await datasetio_impl.get_rows_paginated( response = await datasetio_impl.get_rows_paginated(
@@ -106,4 +106,4 @@ async def test_get_rows_paginated(datasetio_settings):
assert isinstance(response.rows, list) assert isinstance(response.rows, list)
assert len(response.rows) == 10 assert len(response.rows) == 10
assert response.next_page_token == 13 assert response.next_page_token == "13"

View file

@@ -4,6 +4,8 @@
# This source code is licensed under the terms described in the LICENSE file in # This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree. # the root directory of this source tree.
from abc import ABC, abstractmethod from abc import ABC, abstractmethod
from dataclasses import dataclass
from llama_stack.apis.datasetio import * # noqa: F403
class BaseDataset(ABC): class BaseDataset(ABC):
@@ -21,3 +23,9 @@ class BaseDataset(ABC):
@abstractmethod @abstractmethod
def load(self): def load(self):
raise NotImplementedError() raise NotImplementedError()
@dataclass
class DatasetInfo:
dataset_def: DatasetDef
dataset_impl: BaseDataset