Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-12-03 09:53:45 +00:00)

chore: move ListBatchesResponse to models.py

Signed-off-by: Sébastien Han <seb@redhat.com>

This commit is contained in:
parent eb3cab1eec
commit 2fe24a6df8

3 changed files with 21 additions and 22 deletions
Router module (llama_stack.core.server.routers.batches):

@@ -56,7 +56,6 @@ def create_batches_router(impl_getter: Callable[[Api], Batches]) -> APIRouter:
         request: Annotated[CreateBatchRequest, Body(...)],
         svc: Annotated[Batches, Depends(get_batch_service)],
     ) -> BatchObject:
         """Create a new batch."""
         return await svc.create_batch(
             input_file_id=request.input_file_id,
             endpoint=request.endpoint,
@@ -78,7 +77,6 @@ def create_batches_router(impl_getter: Callable[[Api], Batches]) -> APIRouter:
         batch_id: str,
         svc: Annotated[Batches, Depends(get_batch_service)],
     ) -> BatchObject:
         """Retrieve information about a specific batch."""
         return await svc.retrieve_batch(batch_id)

     @router.post(
@@ -94,7 +92,6 @@ def create_batches_router(impl_getter: Callable[[Api], Batches]) -> APIRouter:
         batch_id: str,
         svc: Annotated[Batches, Depends(get_batch_service)],
     ) -> BatchObject:
         """Cancel a batch that is in progress."""
         return await svc.cancel_batch(batch_id)

     @router.get(
@@ -111,7 +108,6 @@ def create_batches_router(impl_getter: Callable[[Api], Batches]) -> APIRouter:
         after: str | None = None,
         limit: int = 20,
     ) -> ListBatchesResponse:
         """List all batches for the current user."""
         return await svc.list_batches(after=after, limit=limit)

     return router
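As context for the handler signatures above, here is a minimal, runnable sketch of the same FastAPI dependency-injection pattern. The stub service, its return value, and the route prefix are illustrative assumptions, not the repository's code:

from typing import Annotated

from fastapi import APIRouter, Depends, FastAPI


class StubBatchService:
    """Illustrative stand-in for the real Batches implementation."""

    async def retrieve_batch(self, batch_id: str) -> dict:
        # The real service returns an openai.types.Batch (BatchObject).
        return {"id": batch_id, "object": "batch", "status": "completed"}


def get_batch_service() -> StubBatchService:
    # In the router above, the implementation is resolved through the
    # impl_getter callable; this stub short-circuits that lookup.
    return StubBatchService()


router = APIRouter(prefix="/v1/batches")  # prefix is an assumption


@router.get("/{batch_id}")
async def retrieve_batch(
    batch_id: str,
    svc: Annotated[StubBatchService, Depends(get_batch_service)],
) -> dict:
    """Retrieve information about a specific batch."""
    return await svc.retrieve_batch(batch_id)


app = FastAPI()
app.include_router(router)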
Protocol module (llama_stack_api.batches):

@@ -6,31 +6,20 @@
 """Batches API protocol and models.

-This module contains the Batches protocol definition and related models.
+This module contains the Batches protocol definition.
+Pydantic models are defined in llama_stack_api.batches.models.
 The router implementation is in llama_stack.core.server.routers.batches.
 """

 from typing import Literal, Protocol, runtime_checkable

-from pydantic import BaseModel, Field
-
-from llama_stack_api.schema_utils import json_schema_type
-
 try:
     from openai.types import Batch as BatchObject
 except ImportError as e:
     raise ImportError("OpenAI package is required for batches API. Please install it with: pip install openai") from e

-
-@json_schema_type
-class ListBatchesResponse(BaseModel):
-    """Response containing a list of batch objects."""
-
-    object: Literal["list"] = "list"
-    data: list[BatchObject] = Field(..., description="List of batch objects")
-    first_id: str | None = Field(default=None, description="ID of the first batch in the list")
-    last_id: str | None = Field(default=None, description="ID of the last batch in the list")
-    has_more: bool = Field(default=False, description="Whether there are more batches available")
+# Import models for re-export
+from llama_stack_api.batches.models import ListBatchesResponse


 @runtime_checkable
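The added "# Import models for re-export" line keeps the public import path stable: callers that import ListBatchesResponse from llama_stack_api.batches keep working even though the class now lives in llama_stack_api.batches.models. A quick sanity check, assuming both modules are importable in your environment:

# Both paths resolve to the same class object after the move.
from llama_stack_api.batches import ListBatchesResponse as from_protocol
from llama_stack_api.batches.models import ListBatchesResponse as from_models

assert from_protocol is from_models  # a re-export, not a copy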
Models module (llama_stack_api.batches.models):

@@ -14,9 +14,13 @@ from typing import Literal

 from pydantic import BaseModel, Field

-from llama_stack_api.batches import BatchObject, ListBatchesResponse
 from llama_stack_api.schema_utils import json_schema_type

+try:
+    from openai.types import Batch as BatchObject
+except ImportError as e:
+    raise ImportError("OpenAI package is required for batches API. Please install it with: pip install openai") from e
+

 @json_schema_type
 class CreateBatchRequest(BaseModel):
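The CreateBatchRequest definition is only partially visible in this view. For orientation, a hypothetical sketch of such a request model, inferred solely from the router's use of request.input_file_id and request.endpoint in the first hunk above (the repository's actual fields, types, and descriptions may differ):

from pydantic import BaseModel, Field


class CreateBatchRequestSketch(BaseModel):
    """Hypothetical request body for batch creation (fields assumed)."""

    input_file_id: str = Field(..., description="ID of the uploaded input file")
    endpoint: str = Field(..., description="Target endpoint for the batch requests")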
@@ -33,5 +37,15 @@ class CreateBatchRequest(BaseModel):
     )


-# Re-export response models for convenience
-__all__ = ["CreateBatchRequest", "BatchObject", "ListBatchesResponse"]
+@json_schema_type
+class ListBatchesResponse(BaseModel):
+    """Response containing a list of batch objects."""
+
+    object: Literal["list"] = "list"
+    data: list[BatchObject] = Field(..., description="List of batch objects")
+    first_id: str | None = Field(default=None, description="ID of the first batch in the list")
+    last_id: str | None = Field(default=None, description="ID of the last batch in the list")
+    has_more: bool = Field(default=False, description="Whether there are more batches available")
+
+
+__all__ = ["CreateBatchRequest", "ListBatchesResponse", "BatchObject"]
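Since the moved model is fully visible in this hunk, a short usage sketch is possible. The empty data list avoids fabricating a full openai.types.Batch payload, and the import assumes the post-commit package layout:

from llama_stack_api.batches.models import ListBatchesResponse

# With no batches, the literal and default fields do all the work.
page = ListBatchesResponse(data=[])
assert page.object == "list"
assert page.has_more is False
assert page.first_id is None and page.last_id is None

# Clients paginate by passing last_id back as the `after` cursor
# while has_more is True (see the list_batches handler above).
print(page.model_dump_json())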