diff --git a/docs/_static/llama-stack-spec.html b/docs/_static/llama-stack-spec.html index 2cec07632..748839691 100644 --- a/docs/_static/llama-stack-spec.html +++ b/docs/_static/llama-stack-spec.html @@ -2229,7 +2229,7 @@ { "name": "limit", "in": "query", - "description": "The number of rows to get per page.", + "description": "The number of rows to get.", "required": false, "schema": { "type": "integer" diff --git a/docs/_static/llama-stack-spec.yaml b/docs/_static/llama-stack-spec.yaml index 843db26df..b607a0b7f 100644 --- a/docs/_static/llama-stack-spec.yaml +++ b/docs/_static/llama-stack-spec.yaml @@ -1497,7 +1497,7 @@ paths: type: integer - name: limit in: query - description: The number of rows to get per page. + description: The number of rows to get. required: false schema: type: integer diff --git a/llama_stack/apis/datasetio/datasetio.py b/llama_stack/apis/datasetio/datasetio.py index b1eaffa17..d9d86fe1b 100644 --- a/llama_stack/apis/datasetio/datasetio.py +++ b/llama_stack/apis/datasetio/datasetio.py @@ -51,6 +51,4 @@ class DatasetIO(Protocol): ... @webmethod(route="/datasetio/append-rows/{dataset_id:path}", method="POST") - async def append_rows( - self, dataset_id: str, rows: List[Dict[str, Any]] - ) -> None: ... + async def append_rows(self, dataset_id: str, rows: List[Dict[str, Any]]) -> None: ... 
diff --git a/llama_stack/providers/inline/datasetio/localfs/datasetio.py b/llama_stack/providers/inline/datasetio/localfs/datasetio.py index 958c7d387..cf4bf7fec 100644 --- a/llama_stack/providers/inline/datasetio/localfs/datasetio.py +++ b/llama_stack/providers/inline/datasetio/localfs/datasetio.py @@ -44,9 +44,7 @@ class PandasDataframeDataset: elif self.dataset_def.source.type == "rows": self.df = pandas.DataFrame(self.dataset_def.source.rows) else: - raise ValueError( f"Unsupported dataset source type: {self.dataset_def.source.type}" ) + raise ValueError(f"Unsupported dataset source type: {self.dataset_def.source.type}") if self.df is None: raise ValueError(f"Failed to load dataset from {self.dataset_def.url}") @@ -119,6 +117,4 @@ class LocalFSDatasetIOImpl(DatasetIO, DatasetsProtocolPrivate): dataset_impl.load() new_rows_df = pandas.DataFrame(rows) - dataset_impl.df = pandas.concat( [dataset_impl.df, new_rows_df], ignore_index=True ) + dataset_impl.df = pandas.concat([dataset_impl.df, new_rows_df], ignore_index=True) diff --git a/llama_stack/providers/remote/datasetio/huggingface/huggingface.py b/llama_stack/providers/remote/datasetio/huggingface/huggingface.py index db6edbce3..fe3195332 100644 --- a/llama_stack/providers/remote/datasetio/huggingface/huggingface.py +++ b/llama_stack/providers/remote/datasetio/huggingface/huggingface.py @@ -98,13 +98,9 @@ class HuggingfaceDatasetIOImpl(DatasetIO, DatasetsProtocolPrivate): new_dataset = hf_datasets.Dataset.from_list(rows) # Concatenate the new rows with existing dataset - updated_dataset = hf_datasets.concatenate_datasets( [loaded_dataset, new_dataset] ) + updated_dataset = hf_datasets.concatenate_datasets([loaded_dataset, new_dataset]) if dataset_def.metadata.get("path", None): updated_dataset.push_to_hub(dataset_def.metadata["path"]) else: - raise NotImplementedError( "Uploading to URL-based datasets is not supported yet" ) + raise NotImplementedError("Uploading to URL-based datasets is not supported yet")