Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-08-15 14:08:00 +00:00)
add notes about batches development status to docs
This also captures other notes from the agents, eval, and inference APIs.
parent 8e678912ec
commit 04a73c89ef
7 changed files with 56 additions and 1 deletion
@@ -39,7 +39,14 @@ class ListBatchesResponse(BaseModel):
 
 @runtime_checkable
 class Batches(Protocol):
-    """Protocol for batch processing API operations."""
+    """Protocol for batch processing API operations.
+
+    The Batches API enables efficient processing of multiple requests in a single operation,
+    particularly useful for processing large datasets, batch evaluation workflows, and
+    cost-effective inference at scale.
+
+    Note: This API is currently under active development and may undergo changes.
+    """
 
     @webmethod(route="/openai/v1/batches", method="POST")
     async def create_batch(
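Because the protocol exposes the OpenAI-compatible route /openai/v1/batches, a batch can in principle be submitted with the standard OpenAI Python SDK pointed at a Llama Stack server. The sketch below is illustrative only and is not part of this commit: the server URL, API key, input file name, and model content of the JSONL requests are assumptions.

```python
# Hypothetical sketch: submitting a batch through the OpenAI-compatible
# /openai/v1/batches route declared by the Batches protocol above.
# Assumes a Llama Stack server at http://localhost:8321 (URL and key are illustrative).
from openai import OpenAI

client = OpenAI(base_url="http://localhost:8321/openai/v1", api_key="none")

# Upload a JSONL file where each line describes one chat-completion request.
batch_input = client.files.create(
    file=open("requests.jsonl", "rb"),
    purpose="batch",
)

# POST /openai/v1/batches, corresponding to the create_batch webmethod in the diff.
batch = client.batches.create(
    input_file_id=batch_input.id,
    endpoint="/v1/chat/completions",
    completion_window="24h",
)
print(batch.id, batch.status)
```

Since the API is noted as under active development, parameter names and accepted values may still change; treat the above as a usage sketch rather than a stable recipe.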