address comments

Xi Yan 2024-10-23 22:17:38 -07:00
parent 59c93548bc
commit afa0c2b146
2 changed files with 15 additions and 11 deletions

@@ -61,13 +61,13 @@ class PandasDataframeDataset(BaseDataset):
         else:
             return self.df.iloc[idx].to_dict()
 
-    def _validate_dataset_schema(self) -> None:
-        assert self.df is not None, "Dataset not loaded. Please call .load() first"
+    def _validate_dataset_schema(self, df) -> pandas.DataFrame:
         # note that we will drop any columns in dataset that are not in the schema
-        self.df = self.df[self.dataset_def.dataset_schema.keys()]
+        df = df[self.dataset_def.dataset_schema.keys()]
         # check all columns in dataset schema are present
-        assert len(self.df.columns) == len(self.dataset_def.dataset_schema)
+        assert len(df.columns) == len(self.dataset_def.dataset_schema)
         # TODO: type checking against column types in dataset schema
+        return df
 
     def load(self) -> None:
         if self.df is not None:
@@ -99,8 +99,7 @@ class PandasDataframeDataset(BaseDataset):
         else:
             raise ValueError(f"Unsupported file type: {self.dataset_def.url}")
 
-        self.df = df
-        self._validate_dataset_schema()
+        self.df = self._validate_dataset_schema(df)
 
 
 class MetaReferenceDatasetIOImpl(DatasetIO, DatasetsProtocolPrivate):
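The net effect of the two hunks above: _validate_dataset_schema now takes the freshly parsed frame, drops any columns outside the dataset schema, and returns the projected frame, which load() assigns to self.df in one step. A minimal runnable sketch of that pattern, using a hypothetical class and schema (not the actual PandasDataframeDataset, which also parses csv/xlsx from a URL):

import pandas


class SchemaProjectedDataset:
    """Hypothetical stand-in that mirrors the validate-then-assign flow."""

    def __init__(self, dataset_schema: dict) -> None:
        self.dataset_schema = dataset_schema
        self.df = None

    def _validate_dataset_schema(self, df: pandas.DataFrame) -> pandas.DataFrame:
        # drop any columns in the dataset that are not in the schema
        df = df[self.dataset_schema.keys()]
        # check all columns in the dataset schema are present
        assert len(df.columns) == len(self.dataset_schema)
        return df

    def load(self, df: pandas.DataFrame) -> None:
        if self.df is not None:
            return
        # the validated, projected frame is assigned in a single step
        self.df = self._validate_dataset_schema(df)


ds = SchemaProjectedDataset({"input": "string", "output": "string"})
ds.load(pandas.DataFrame({"input": ["hi"], "output": ["hello"], "extra": [1]}))
print(list(ds.df.columns))  # ['input', 'output'] -- 'extra' was dropped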
@@ -136,7 +135,10 @@ class MetaReferenceDatasetIOImpl(DatasetIO, DatasetsProtocolPrivate):
         dataset_info = self.dataset_infos.get(dataset_id)
         dataset_info.dataset_impl.load()
 
-        if page_token is None or not page_token.isnumeric():
+        if page_token and not page_token.isnumeric():
+            raise ValueError("Invalid page_token")
+
+        if page_token is None:
             next_page_token = 0
         else:
             next_page_token = int(page_token)
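The last hunk separates token validation from defaulting: a non-empty, non-numeric page_token is now rejected explicitly instead of being silently treated as page 0. A small sketch of that parsing logic pulled out into a standalone function (hypothetical name, not part of the repository):

from typing import Optional


def parse_page_token(page_token: Optional[str]) -> int:
    # reject tokens that are present but not numeric
    if page_token and not page_token.isnumeric():
        raise ValueError("Invalid page_token")

    # a missing token means "start from the first row"
    if page_token is None:
        return 0
    return int(page_token)


assert parse_page_token(None) == 0
assert parse_page_token("3") == 3
# parse_page_token("abc") raises ValueError("Invalid page_token")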