chore: move ListBatchesResponse to models.py
Signed-off-by: Sébastien Han <seb@redhat.com>
commit 2fe24a6df8
parent eb3cab1eec
3 changed files with 21 additions and 22 deletions
llama_stack/core/server/routers/batches.py

@@ -56,7 +56,6 @@ def create_batches_router(impl_getter: Callable[[Api], Batches]) -> APIRouter:
         request: Annotated[CreateBatchRequest, Body(...)],
         svc: Annotated[Batches, Depends(get_batch_service)],
     ) -> BatchObject:
-        """Create a new batch."""
         return await svc.create_batch(
             input_file_id=request.input_file_id,
             endpoint=request.endpoint,
@@ -78,7 +77,6 @@ def create_batches_router(impl_getter: Callable[[Api], Batches]) -> APIRouter:
         batch_id: str,
         svc: Annotated[Batches, Depends(get_batch_service)],
     ) -> BatchObject:
-        """Retrieve information about a specific batch."""
         return await svc.retrieve_batch(batch_id)
 
     @router.post(
@@ -94,7 +92,6 @@ def create_batches_router(impl_getter: Callable[[Api], Batches]) -> APIRouter:
         batch_id: str,
         svc: Annotated[Batches, Depends(get_batch_service)],
     ) -> BatchObject:
-        """Cancel a batch that is in progress."""
         return await svc.cancel_batch(batch_id)
 
     @router.get(
@@ -111,7 +108,6 @@ def create_batches_router(impl_getter: Callable[[Api], Batches]) -> APIRouter:
         after: str | None = None,
         limit: int = 20,
     ) -> ListBatchesResponse:
-        """List all batches for the current user."""
         return await svc.list_batches(after=after, limit=limit)
 
     return router
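For context, the handlers above follow FastAPI's Annotated dependency-injection pattern: each route resolves the Batches implementation via Depends and delegates to it. Below is a minimal, self-contained sketch of that pattern. The Batches protocol and ListBatchesResponse here are simplified stand-ins, and the single-implementation factory is an assumption; per the hunk header, the real create_batches_router takes impl_getter: Callable[[Api], Batches].

from typing import Annotated, Protocol

from fastapi import APIRouter, Depends
from pydantic import BaseModel


class ListBatchesResponse(BaseModel):
    # Simplified stand-in for the real model moved to models.py.
    object: str = "list"
    data: list[dict] = []
    has_more: bool = False


class Batches(Protocol):
    # Stand-in for the llama_stack_api Batches protocol.
    async def list_batches(self, after: str | None, limit: int) -> ListBatchesResponse: ...


def create_batches_router(impl: Batches) -> APIRouter:
    # Assumption: one fixed implementation; the real factory resolves
    # the implementation through impl_getter instead.
    router = APIRouter(prefix="/v1/batches")

    def get_batch_service() -> Batches:
        return impl

    @router.get("")
    async def list_batches(
        svc: Annotated[Batches, Depends(get_batch_service)],
        after: str | None = None,
        limit: int = 20,
    ) -> ListBatchesResponse:
        # Handlers stay thin: validate inputs, delegate to the service.
        return await svc.list_batches(after=after, limit=limit)

    return router

FastAPI injects svc per request and serializes the pydantic return model, so the route bodies reduce to one-line delegations.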
llama_stack_api/batches/__init__.py

@@ -6,31 +6,20 @@
 
 """Batches API protocol and models.
 
-This module contains the Batches protocol definition and related models.
+This module contains the Batches protocol definition.
+Pydantic models are defined in llama_stack_api.batches.models.
 The router implementation is in llama_stack.core.server.routers.batches.
 """
 
 from typing import Literal, Protocol, runtime_checkable
 
-from pydantic import BaseModel, Field
-
-from llama_stack_api.schema_utils import json_schema_type
-
 try:
     from openai.types import Batch as BatchObject
 except ImportError as e:
     raise ImportError("OpenAI package is required for batches API. Please install it with: pip install openai") from e
 
-
-@json_schema_type
-class ListBatchesResponse(BaseModel):
-    """Response containing a list of batch objects."""
-
-    object: Literal["list"] = "list"
-    data: list[BatchObject] = Field(..., description="List of batch objects")
-    first_id: str | None = Field(default=None, description="ID of the first batch in the list")
-    last_id: str | None = Field(default=None, description="ID of the last batch in the list")
-    has_more: bool = Field(default=False, description="Whether there are more batches available")
+# Import models for re-export
+from llama_stack_api.batches.models import ListBatchesResponse
 
 
 @runtime_checkable
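Because the package root keeps re-exporting the model, existing import sites are unaffected; only the defining module changed. A quick sanity check, assuming llama_stack_api (and its openai dependency) is installed:

# Both import paths resolve to the one class now defined in models.py.
from llama_stack_api.batches import ListBatchesResponse as via_package
from llama_stack_api.batches.models import ListBatchesResponse as via_models

assert via_package is via_models
print(via_package.__module__)  # expected: llama_stack_api.batches.models

Note that the import direction flipped: __init__ now imports from models.py, so models.py must obtain BatchObject straight from openai (see the next file) rather than from the package root, or the two modules would import each other cyclically.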
llama_stack_api/batches/models.py

@@ -14,9 +14,13 @@ from typing import Literal
 
 from pydantic import BaseModel, Field
 
-from llama_stack_api.batches import BatchObject, ListBatchesResponse
 from llama_stack_api.schema_utils import json_schema_type
 
+try:
+    from openai.types import Batch as BatchObject
+except ImportError as e:
+    raise ImportError("OpenAI package is required for batches API. Please install it with: pip install openai") from e
+
 
 @json_schema_type
 class CreateBatchRequest(BaseModel):
@@ -33,5 +37,15 @@ class CreateBatchRequest(BaseModel):
     )
 
-# Re-export response models for convenience
-__all__ = ["CreateBatchRequest", "BatchObject", "ListBatchesResponse"]
+@json_schema_type
+class ListBatchesResponse(BaseModel):
+    """Response containing a list of batch objects."""
+
+    object: Literal["list"] = "list"
+    data: list[BatchObject] = Field(..., description="List of batch objects")
+    first_id: str | None = Field(default=None, description="ID of the first batch in the list")
+    last_id: str | None = Field(default=None, description="ID of the last batch in the list")
+    has_more: bool = Field(default=False, description="Whether there are more batches available")
+
+
+__all__ = ["CreateBatchRequest", "ListBatchesResponse", "BatchObject"]
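To exercise the moved model, the sketch below reproduces ListBatchesResponse verbatim from the hunk above, minus the @json_schema_type decorator (it requires the llama_stack_api runtime), and builds a single-page response. The Batch field values are illustrative only; it assumes the openai package is installed.

from typing import Literal

from openai.types import Batch as BatchObject
from pydantic import BaseModel, Field


class ListBatchesResponse(BaseModel):
    """Response containing a list of batch objects."""

    object: Literal["list"] = "list"
    data: list[BatchObject] = Field(..., description="List of batch objects")
    first_id: str | None = Field(default=None, description="ID of the first batch in the list")
    last_id: str | None = Field(default=None, description="ID of the last batch in the list")
    has_more: bool = Field(default=False, description="Whether there are more batches available")


# Illustrative values; these are the required core fields of openai.types.Batch.
batch = BatchObject(
    id="batch_abc123",
    object="batch",
    endpoint="/v1/chat/completions",
    input_file_id="file-abc123",
    completion_window="24h",
    status="completed",
    created_at=1700000000,
)
page = ListBatchesResponse(data=[batch], first_id=batch.id, last_id=batch.id, has_more=False)
print(page.model_dump_json())

The object/first_id/last_id/has_more fields mirror OpenAI's cursor-pagination envelope, so a client can page by passing after=last_id to the list endpoint while has_more is true.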