feat(datasets api): (1.3/n) patch OpenAPI gen for datasetio->datasets (#1657)

# What does this PR do?
- We need to tag the DatasetIO class correctly as "Datasets" to match the
endpoint change

[//]: # (If resolving an issue, uncomment and update the line below)
[//]: # (Closes #[issue-number])

## Test Plan
**Before**
<img width="1474" alt="image"
src="https://github.com/user-attachments/assets/48737317-28a3-4aa6-a1b5-e1ea680cef84"
/>


**After**
<img width="1508" alt="image"
src="https://github.com/user-attachments/assets/123322f0-a52f-47ee-99a7-ecc66c1b09ec"
/>

[//]: # (## Documentation)
This commit is contained in:
Xi Yan 2025-03-15 14:12:45 -07:00 committed by GitHub
parent 5cb0ad7d7f
commit 72ccdc19a8
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
3 changed files with 48 additions and 43 deletions

View file

@ -60,7 +60,7 @@
} }
}, },
"tags": [ "tags": [
"DatasetIO" "Datasets"
], ],
"description": "", "description": "",
"parameters": [ "parameters": [
@ -524,7 +524,7 @@
} }
}, },
"tags": [ "tags": [
"Files (Coming Soon)" "Files"
], ],
"description": "List all buckets.", "description": "List all buckets.",
"parameters": [ "parameters": [
@ -564,7 +564,7 @@
} }
}, },
"tags": [ "tags": [
"Files (Coming Soon)" "Files"
], ],
"description": "Create a new upload session for a file identified by a bucket and key.", "description": "Create a new upload session for a file identified by a bucket and key.",
"parameters": [], "parameters": [],
@ -791,7 +791,7 @@
} }
}, },
"tags": [ "tags": [
"Files (Coming Soon)" "Files"
], ],
"description": "Get a file info identified by a bucket and key.", "description": "Get a file info identified by a bucket and key.",
"parameters": [ "parameters": [
@ -841,7 +841,7 @@
} }
}, },
"tags": [ "tags": [
"Files (Coming Soon)" "Files"
], ],
"description": "Delete a file identified by a bucket and key.", "description": "Delete a file identified by a bucket and key.",
"parameters": [ "parameters": [
@ -1830,7 +1830,7 @@
} }
}, },
"tags": [ "tags": [
"Files (Coming Soon)" "Files"
], ],
"description": "Returns information about an existing upload session", "description": "Returns information about an existing upload session",
"parameters": [ "parameters": [
@ -1878,7 +1878,7 @@
} }
}, },
"tags": [ "tags": [
"Files (Coming Soon)" "Files"
], ],
"description": "Upload file content to an existing upload session. On the server, request body will have the raw bytes that are uploaded.", "description": "Upload file content to an existing upload session. On the server, request body will have the raw bytes that are uploaded.",
"parameters": [ "parameters": [
@ -2204,7 +2204,7 @@
} }
}, },
"tags": [ "tags": [
"DatasetIO" "Datasets"
], ],
"description": "Get a paginated list of rows from a dataset. Uses cursor-based pagination.", "description": "Get a paginated list of rows from a dataset. Uses cursor-based pagination.",
"parameters": [ "parameters": [
@ -2603,7 +2603,7 @@
} }
}, },
"tags": [ "tags": [
"Files (Coming Soon)" "Files"
], ],
"description": "List all files in a bucket.", "description": "List all files in a bucket.",
"parameters": [ "parameters": [
@ -10274,7 +10274,7 @@
"name": "Benchmarks" "name": "Benchmarks"
}, },
{ {
"name": "DatasetIO" "name": "Datasets"
}, },
{ {
"name": "Datasets" "name": "Datasets"
@ -10284,7 +10284,7 @@
"x-displayName": "Llama Stack Evaluation API for running evaluations on model and agent candidates." "x-displayName": "Llama Stack Evaluation API for running evaluations on model and agent candidates."
}, },
{ {
"name": "Files (Coming Soon)" "name": "Files"
}, },
{ {
"name": "Inference", "name": "Inference",
@ -10342,10 +10342,10 @@
"Agents", "Agents",
"BatchInference (Coming Soon)", "BatchInference (Coming Soon)",
"Benchmarks", "Benchmarks",
"DatasetIO", "Datasets",
"Datasets", "Datasets",
"Eval", "Eval",
"Files (Coming Soon)", "Files",
"Inference", "Inference",
"Inspect", "Inspect",
"Models", "Models",

View file

@ -26,7 +26,7 @@ paths:
default: default:
$ref: '#/components/responses/DefaultError' $ref: '#/components/responses/DefaultError'
tags: tags:
- DatasetIO - Datasets
description: '' description: ''
parameters: parameters:
- name: dataset_id - name: dataset_id
@ -350,7 +350,7 @@ paths:
default: default:
$ref: '#/components/responses/DefaultError' $ref: '#/components/responses/DefaultError'
tags: tags:
- Files (Coming Soon) - Files
description: List all buckets. description: List all buckets.
parameters: parameters:
- name: bucket - name: bucket
@ -377,7 +377,7 @@ paths:
default: default:
$ref: '#/components/responses/DefaultError' $ref: '#/components/responses/DefaultError'
tags: tags:
- Files (Coming Soon) - Files
description: >- description: >-
Create a new upload session for a file identified by a bucket and key. Create a new upload session for a file identified by a bucket and key.
parameters: [] parameters: []
@ -536,7 +536,7 @@ paths:
default: default:
$ref: '#/components/responses/DefaultError' $ref: '#/components/responses/DefaultError'
tags: tags:
- Files (Coming Soon) - Files
description: >- description: >-
Get a file info identified by a bucket and key. Get a file info identified by a bucket and key.
parameters: parameters:
@ -572,7 +572,7 @@ paths:
default: default:
$ref: '#/components/responses/DefaultError' $ref: '#/components/responses/DefaultError'
tags: tags:
- Files (Coming Soon) - Files
description: >- description: >-
Delete a file identified by a bucket and key. Delete a file identified by a bucket and key.
parameters: parameters:
@ -1224,7 +1224,7 @@ paths:
default: default:
$ref: '#/components/responses/DefaultError' $ref: '#/components/responses/DefaultError'
tags: tags:
- Files (Coming Soon) - Files
description: >- description: >-
Returns information about an existing upload session Returns information about an existing upload session
parameters: parameters:
@ -1255,7 +1255,7 @@ paths:
default: default:
$ref: '#/components/responses/DefaultError' $ref: '#/components/responses/DefaultError'
tags: tags:
- Files (Coming Soon) - Files
description: >- description: >-
Upload file content to an existing upload session. On the server, request Upload file content to an existing upload session. On the server, request
body will have the raw bytes that are uploaded. body will have the raw bytes that are uploaded.
@ -1477,7 +1477,7 @@ paths:
default: default:
$ref: '#/components/responses/DefaultError' $ref: '#/components/responses/DefaultError'
tags: tags:
- DatasetIO - Datasets
description: >- description: >-
Get a paginated list of rows from a dataset. Uses cursor-based pagination. Get a paginated list of rows from a dataset. Uses cursor-based pagination.
parameters: parameters:
@ -1754,7 +1754,7 @@ paths:
default: default:
$ref: '#/components/responses/DefaultError' $ref: '#/components/responses/DefaultError'
tags: tags:
- Files (Coming Soon) - Files
description: List all files in a bucket. description: List all files in a bucket.
parameters: parameters:
- name: bucket - name: bucket
@ -6931,12 +6931,12 @@ tags:
Agents API for creating and interacting with agentic systems. Agents API for creating and interacting with agentic systems.
- name: BatchInference (Coming Soon) - name: BatchInference (Coming Soon)
- name: Benchmarks - name: Benchmarks
- name: DatasetIO - name: Datasets
- name: Datasets - name: Datasets
- name: Eval - name: Eval
x-displayName: >- x-displayName: >-
Llama Stack Evaluation API for running evaluations on model and agent candidates. Llama Stack Evaluation API for running evaluations on model and agent candidates.
- name: Files (Coming Soon) - name: Files
- name: Inference - name: Inference
description: >- description: >-
This API provides the raw interface to the underlying models. Two kinds of models This API provides the raw interface to the underlying models. Two kinds of models
@ -6971,10 +6971,10 @@ x-tagGroups:
- Agents - Agents
- BatchInference (Coming Soon) - BatchInference (Coming Soon)
- Benchmarks - Benchmarks
- DatasetIO - Datasets
- Datasets - Datasets
- Eval - Eval
- Files (Coming Soon) - Files
- Inference - Inference
- Inspect - Inspect
- Models - Models

View file

@ -457,9 +457,9 @@ class Generator:
"status": 400, "status": 400,
"title": "Bad Request", "title": "Bad Request",
"detail": "The request was invalid or malformed", "detail": "The request was invalid or malformed",
} },
) )
} },
) )
self.responses["TooManyRequests429"] = Response( self.responses["TooManyRequests429"] = Response(
@ -471,9 +471,9 @@ class Generator:
"status": 429, "status": 429,
"title": "Too Many Requests", "title": "Too Many Requests",
"detail": "You have exceeded the rate limit. Please try again later.", "detail": "You have exceeded the rate limit. Please try again later.",
} },
) )
} },
) )
self.responses["InternalServerError500"] = Response( self.responses["InternalServerError500"] = Response(
@ -485,9 +485,9 @@ class Generator:
"status": 500, "status": 500,
"title": "Internal Server Error", "title": "Internal Server Error",
"detail": "An unexpected error occurred. Our team has been notified.", "detail": "An unexpected error occurred. Our team has been notified.",
} },
) )
} },
) )
# Add a default error response for any unhandled error cases # Add a default error response for any unhandled error cases
@ -500,9 +500,9 @@ class Generator:
"status": 0, "status": 0,
"title": "Error", "title": "Error",
"detail": "An unexpected error occurred", "detail": "An unexpected error occurred",
} },
) )
} },
) )
def _build_type_tag(self, ref: str, schema: Schema) -> Tag: def _build_type_tag(self, ref: str, schema: Schema) -> Tag:
@ -547,11 +547,14 @@ class Generator:
"SyntheticDataGeneration", "SyntheticDataGeneration",
"PostTraining", "PostTraining",
"BatchInference", "BatchInference",
"Files",
]: ]:
op.defining_class.__name__ = f"{op.defining_class.__name__} (Coming Soon)" op.defining_class.__name__ = f"{op.defining_class.__name__} (Coming Soon)"
print(op.defining_class.__name__) print(op.defining_class.__name__)
# TODO (xiyan): temporary fix for datasetio inner impl + datasets api
if op.defining_class.__name__ in ["DatasetIO"]:
op.defining_class.__name__ = "Datasets"
doc_string = parse_type(op.func_ref) doc_string = parse_type(op.func_ref)
doc_params = dict( doc_params = dict(
(param.name, param.description) for param in doc_string.params.values() (param.name, param.description) for param in doc_string.params.values()
@ -598,7 +601,9 @@ class Generator:
# data passed in request body as raw bytes cannot have request parameters # data passed in request body as raw bytes cannot have request parameters
if raw_bytes_request_body and op.request_params: if raw_bytes_request_body and op.request_params:
raise ValueError("Cannot have both raw bytes request body and request parameters") raise ValueError(
"Cannot have both raw bytes request body and request parameters"
)
# data passed in request body as raw bytes # data passed in request body as raw bytes
if raw_bytes_request_body: if raw_bytes_request_body: