Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-08-10 04:08:31 +00:00)

commit cc48d9e9e6
parent 6a8bd19ba2

    precommit

5 changed files with 7 additions and 17 deletions
docs/_static/llama-stack-spec.html (vendored, 2 lines changed)
@@ -2229,7 +2229,7 @@
             {
                 "name": "limit",
                 "in": "query",
-                "description": "The number of rows to get per page.",
+                "description": "The number of rows to get.",
                 "required": false,
                 "schema": {
                     "type": "integer"
docs/_static/llama-stack-spec.yaml (vendored, 2 lines changed)
@@ -1497,7 +1497,7 @@ paths:
            type: integer
        - name: limit
          in: query
-          description: The number of rows to get per page.
+          description: The number of rows to get.
          required: false
          schema:
            type: integer
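Both vendored spec files carry the same wording change: the "limit" query parameter on the paginated rows endpoint is now described as "The number of rows to get." A minimal client-side sketch of passing that parameter; the base URL and endpoint path below are illustrative assumptions, not taken from this diff:

import requests

BASE_URL = "http://localhost:8321"   # assumed server address
DATASET_ID = "my-dataset"            # hypothetical dataset id

# Only the "limit" query parameter comes from the spec change above;
# the route itself is a placeholder.
resp = requests.get(
    f"{BASE_URL}/datasetio/rows/{DATASET_ID}",
    params={"limit": 10},  # the number of rows to get
)
resp.raise_for_status()
print(resp.json())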
@@ -51,6 +51,4 @@ class DatasetIO(Protocol):
         ...

     @webmethod(route="/datasetio/append-rows/{dataset_id:path}", method="POST")
-    async def append_rows(
-        self, dataset_id: str, rows: List[Dict[str, Any]]
-    ) -> None: ...
+    async def append_rows(self, dataset_id: str, rows: List[Dict[str, Any]]) -> None: ...
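The formatter collapses the append_rows signature onto one line without changing it. For reference, a toy in-memory class that satisfies this protocol method, assuming only the signature shown in the hunk (the webmethod routing machinery is omitted):

from typing import Any, Dict, List

class InMemoryDatasetIO:
    """Illustrative stand-in for a DatasetIO provider, not the real implementation."""

    def __init__(self) -> None:
        self._rows: Dict[str, List[Dict[str, Any]]] = {}

    async def append_rows(self, dataset_id: str, rows: List[Dict[str, Any]]) -> None:
        # Accumulate rows under the dataset id, creating the list on first use.
        self._rows.setdefault(dataset_id, []).extend(rows)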
@@ -44,9 +44,7 @@ class PandasDataframeDataset:
         elif self.dataset_def.source.type == "rows":
             self.df = pandas.DataFrame(self.dataset_def.source.rows)
         else:
-            raise ValueError(
-                f"Unsupported dataset source type: {self.dataset_def.source.type}"
-            )
+            raise ValueError(f"Unsupported dataset source type: {self.dataset_def.source.type}")

         if self.df is None:
             raise ValueError(f"Failed to load dataset from {self.dataset_def.url}")
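As context for the hunk above: the "rows" source type hands a list of dicts straight to the pandas constructor, one dict per row. A self-contained sketch of that path, using made-up data:

import pandas

rows = [
    {"question": "2 + 2?", "answer": "4"},
    {"question": "Capital of France?", "answer": "Paris"},
]
df = pandas.DataFrame(rows)  # each dict becomes one row; keys become columns
print(df.shape)  # (2, 2)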
@@ -119,6 +117,4 @@ class LocalFSDatasetIOImpl(DatasetIO, DatasetsProtocolPrivate):
         dataset_impl.load()

         new_rows_df = pandas.DataFrame(rows)
-        dataset_impl.df = pandas.concat(
-            [dataset_impl.df, new_rows_df], ignore_index=True
-        )
+        dataset_impl.df = pandas.concat([dataset_impl.df, new_rows_df], ignore_index=True)
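The localfs provider appends rows by concatenating a new DataFrame onto the loaded one. The same pattern in isolation, with hypothetical data:

import pandas

existing = pandas.DataFrame([{"id": 1, "text": "hello"}])
new_rows = pandas.DataFrame([{"id": 2, "text": "world"}])

# ignore_index=True renumbers the result 0..n-1 instead of keeping
# both frames' original indices.
combined = pandas.concat([existing, new_rows], ignore_index=True)
print(combined)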
@@ -98,13 +98,9 @@ class HuggingfaceDatasetIOImpl(DatasetIO, DatasetsProtocolPrivate):
         new_dataset = hf_datasets.Dataset.from_list(rows)

         # Concatenate the new rows with existing dataset
-        updated_dataset = hf_datasets.concatenate_datasets(
-            [loaded_dataset, new_dataset]
-        )
+        updated_dataset = hf_datasets.concatenate_datasets([loaded_dataset, new_dataset])

         if dataset_def.metadata.get("path", None):
             updated_dataset.push_to_hub(dataset_def.metadata["path"])
         else:
-            raise NotImplementedError(
-                "Uploading to URL-based datasets is not supported yet"
-            )
+            raise NotImplementedError("Uploading to URL-based datasets is not supported yet")
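The Hugging Face provider does the equivalent with the datasets library: build a Dataset from the new rows, concatenate it with the loaded one, then push the result to the Hub. A minimal sketch of the concatenation step (push_to_hub needs an authenticated Hub login, so it is left commented out; the data is made up):

import datasets as hf_datasets

loaded_dataset = hf_datasets.Dataset.from_list([{"id": 1, "text": "hello"}])
new_dataset = hf_datasets.Dataset.from_list([{"id": 2, "text": "world"}])

# Both datasets must share a schema; the rows are stacked in order.
updated_dataset = hf_datasets.concatenate_datasets([loaded_dataset, new_dataset])
print(len(updated_dataset))  # 2

# updated_dataset.push_to_hub("org/repo")  # requires Hub credentials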