mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-08-07 11:08:20 +00:00)
commit cc48d9e9e6 (parent 6a8bd19ba2)

    precommit

5 changed files with 7 additions and 17 deletions
docs/_static/llama-stack-spec.html (vendored, 2 changed lines)
@@ -2229,7 +2229,7 @@
           {
             "name": "limit",
             "in": "query",
-            "description": "The number of rows to get per page.",
+            "description": "The number of rows to get.",
             "required": false,
             "schema": {
               "type": "integer"
docs/_static/llama-stack-spec.yaml (vendored, 2 changed lines)
@@ -1497,7 +1497,7 @@ paths:
             type: integer
         - name: limit
           in: query
-          description: The number of rows to get per page.
+          description: The number of rows to get.
          required: false
          schema:
            type: integer
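For reference, a client would exercise the limit parameter described in the two spec hunks above roughly as follows. This is a minimal sketch: the server address, dataset id, and the iterrows route shape are assumptions for illustration, not taken from this diff.

import requests

# Hypothetical client call showing the `limit` query parameter;
# BASE_URL and the route are assumed for illustration only.
BASE_URL = "http://localhost:8321"  # assumed llama-stack server address

resp = requests.get(
    f"{BASE_URL}/datasetio/iterrows/my-dataset",  # assumed route
    params={"limit": 10},  # request at most 10 rows
)
resp.raise_for_status()
print(resp.json())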
@@ -51,6 +51,4 @@ class DatasetIO(Protocol):
         ...

     @webmethod(route="/datasetio/append-rows/{dataset_id:path}", method="POST")
-    async def append_rows(
-        self, dataset_id: str, rows: List[Dict[str, Any]]
-    ) -> None: ...
+    async def append_rows(self, dataset_id: str, rows: List[Dict[str, Any]]) -> None: ...
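The hunk above only collapses the append_rows signature of the DatasetIO protocol onto one line. As a minimal sketch of what a conforming provider looks like (the in-memory class below is illustrative, not part of llama-stack):

from typing import Any, Dict, List


class InMemoryDatasetIO:
    """Illustrative in-memory provider matching the append_rows signature."""

    def __init__(self) -> None:
        self._rows: Dict[str, List[Dict[str, Any]]] = {}

    async def append_rows(self, dataset_id: str, rows: List[Dict[str, Any]]) -> None:
        # Accumulate rows under the dataset id, creating the list on first write.
        self._rows.setdefault(dataset_id, []).extend(rows)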
@@ -44,9 +44,7 @@ class PandasDataframeDataset:
         elif self.dataset_def.source.type == "rows":
             self.df = pandas.DataFrame(self.dataset_def.source.rows)
         else:
-            raise ValueError(
-                f"Unsupported dataset source type: {self.dataset_def.source.type}"
-            )
+            raise ValueError(f"Unsupported dataset source type: {self.dataset_def.source.type}")

         if self.df is None:
             raise ValueError(f"Failed to load dataset from {self.dataset_def.url}")
@@ -119,6 +117,4 @@ class LocalFSDatasetIOImpl(DatasetIO, DatasetsProtocolPrivate):
         dataset_impl.load()

         new_rows_df = pandas.DataFrame(rows)
-        dataset_impl.df = pandas.concat(
-            [dataset_impl.df, new_rows_df], ignore_index=True
-        )
+        dataset_impl.df = pandas.concat([dataset_impl.df, new_rows_df], ignore_index=True)
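In isolation, the pandas.concat call collapsed above behaves as follows; the sample rows are made up for illustration:

import pandas

df = pandas.DataFrame([{"question": "2+2?", "answer": "4"}])
new_rows_df = pandas.DataFrame([{"question": "3+3?", "answer": "6"}])
# ignore_index=True renumbers the combined index 0..n-1 instead of
# keeping each frame's original row labels.
df = pandas.concat([df, new_rows_df], ignore_index=True)
print(len(df))  # 2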
@@ -98,13 +98,9 @@ class HuggingfaceDatasetIOImpl(DatasetIO, DatasetsProtocolPrivate):
         new_dataset = hf_datasets.Dataset.from_list(rows)

         # Concatenate the new rows with existing dataset
-        updated_dataset = hf_datasets.concatenate_datasets(
-            [loaded_dataset, new_dataset]
-        )
+        updated_dataset = hf_datasets.concatenate_datasets([loaded_dataset, new_dataset])

         if dataset_def.metadata.get("path", None):
             updated_dataset.push_to_hub(dataset_def.metadata["path"])
         else:
-            raise NotImplementedError(
-                "Uploading to URL-based datasets is not supported yet"
-            )
+            raise NotImplementedError("Uploading to URL-based datasets is not supported yet")
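For the Hugging Face path, the collapsed calls above compose like this in isolation; the rows and repo id are placeholders, and push_to_hub requires an authenticated Hugging Face token:

import datasets as hf_datasets

loaded_dataset = hf_datasets.Dataset.from_list([{"text": "hello"}])
new_dataset = hf_datasets.Dataset.from_list([{"text": "world"}])
# concatenate_datasets appends the new rows after the existing ones.
updated_dataset = hf_datasets.concatenate_datasets([loaded_dataset, new_dataset])
# updated_dataset.push_to_hub("org/dataset-name")  # placeholder repo id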