From ac816a6b255f0ba201fda17d49952014f91ebef0 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?S=C3=A9bastien=20Han?=
Date: Fri, 21 Nov 2025 15:56:44 +0100
Subject: [PATCH] fix: export batch request models from top-level init
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

All batch models are now exported from the top level for better
discoverability and IDE support.

Signed-off-by: Sébastien Han
---
 src/llama_stack_api/__init__.py | 14 +++++++++++++-
 1 file changed, 13 insertions(+), 1 deletion(-)

diff --git a/src/llama_stack_api/__init__.py b/src/llama_stack_api/__init__.py
index b6fe2fd23..f919d2afd 100644
--- a/src/llama_stack_api/__init__.py
+++ b/src/llama_stack_api/__init__.py
@@ -26,7 +26,15 @@ from . import common # noqa: F401
 
 # Import all public API symbols
 from .agents import Agents, ResponseGuardrail, ResponseGuardrailSpec
-from .batches import Batches, BatchObject, ListBatchesResponse
+from .batches import (
+    Batches,
+    BatchObject,
+    CancelBatchRequest,
+    CreateBatchRequest,
+    ListBatchesRequest,
+    ListBatchesResponse,
+    RetrieveBatchRequest,
+)
 from .benchmarks import (
     Benchmark,
     BenchmarkInput,
@@ -462,6 +470,9 @@ __all__ = [
     "BasicScoringFnParams",
     "Batches",
     "BatchObject",
+    "CancelBatchRequest",
+    "CreateBatchRequest",
+    "ListBatchesRequest",
     "Benchmark",
     "BenchmarkConfig",
     "BenchmarkInput",
@@ -555,6 +566,7 @@ __all__ = [
     "LLMAsJudgeScoringFnParams",
     "LLMRAGQueryGeneratorConfig",
     "ListBatchesResponse",
+    "RetrieveBatchRequest",
     "ListBenchmarksResponse",
     "ListDatasetsResponse",
     "ListModelsResponse",