chore: move ListBatchesResponse to models.py

Signed-off-by: Sébastien Han <seb@redhat.com>
This commit is contained in:
Sébastien Han 2025-11-20 12:41:24 +01:00
parent eb3cab1eec
commit 2fe24a6df8
No known key found for this signature in database
3 changed files with 21 additions and 22 deletions

View file

@@ -14,9 +14,13 @@ from typing import Literal
from pydantic import BaseModel, Field
from llama_stack_api.batches import BatchObject, ListBatchesResponse
from llama_stack_api.schema_utils import json_schema_type
try:
from openai.types import Batch as BatchObject
except ImportError as e:
raise ImportError("OpenAI package is required for batches API. Please install it with: pip install openai") from e
@json_schema_type
class CreateBatchRequest(BaseModel):
@@ -33,5 +37,15 @@ class CreateBatchRequest(BaseModel):
)
# Re-export response models for convenience
__all__ = ["CreateBatchRequest", "BatchObject", "ListBatchesResponse"]
@json_schema_type
class ListBatchesResponse(BaseModel):
    """Response containing a list of batch objects.

    List envelope matching the shape of an OpenAI-style list response
    (fixed ``object`` discriminator plus first/last-ID cursor fields);
    ``BatchObject`` itself is the OpenAI ``Batch`` type re-exported by
    this module.
    """

    # Fixed discriminator marking this payload as a list envelope.
    object: Literal["list"] = "list"
    # Batches returned in this response; required (no default).
    data: list[BatchObject] = Field(..., description="List of batch objects")
    # IDs of the first and last batches in ``data`` — None when not set
    # (presumably when the list is empty; confirm against the producer).
    first_id: str | None = Field(default=None, description="ID of the first batch in the list")
    last_id: str | None = Field(default=None, description="ID of the last batch in the list")
    # Whether more batches exist beyond this page; defaults to False.
    has_more: bool = Field(default=False, description="Whether there are more batches available")
__all__ = ["CreateBatchRequest", "ListBatchesResponse", "BatchObject"]