diff --git a/litellm/batches/main.py b/litellm/batches/main.py
index af2dc5059d..79aefa5f51 100644
--- a/litellm/batches/main.py
+++ b/litellm/batches/main.py
@@ -50,7 +50,7 @@ async def acreate_batch(
     extra_headers: Optional[Dict[str, str]] = None,
     extra_body: Optional[Dict[str, str]] = None,
     **kwargs,
-) -> Coroutine[Any, Any, Batch]:
+) -> Batch:
     """
     Async: Creates and executes a batch from an uploaded file of request
 
@@ -89,7 +89,7 @@ async def acreate_batch(
 
 def create_batch(
     completion_window: Literal["24h"],
-    endpoint: Literal["/v1/chat/completions", "/v1/embeddings"],
+    endpoint: Literal["/v1/chat/completions", "/v1/embeddings", "/v1/completions"],
     input_file_id: str,
     custom_llm_provider: Literal["openai"] = "openai",
     metadata: Optional[Dict[str, str]] = None,
@@ -189,7 +189,7 @@ async def aretrieve_batch(
     extra_headers: Optional[Dict[str, str]] = None,
     extra_body: Optional[Dict[str, str]] = None,
     **kwargs,
-) -> Coroutine[Any, Any, Batch]:
+) -> Batch:
     """
     Async: Retrieves a batch.
 
diff --git a/litellm/types/llms/openai.py b/litellm/types/llms/openai.py
index 42f1dac3d6..294e299dbf 100644
--- a/litellm/types/llms/openai.py
+++ b/litellm/types/llms/openai.py
@@ -257,7 +257,7 @@ class CreateBatchRequest(TypedDict, total=False):
     """
 
     completion_window: Literal["24h"]
-    endpoint: Literal["/v1/chat/completions", "/v1/embeddings"]
+    endpoint: Literal["/v1/chat/completions", "/v1/embeddings", "/v1/completions"]
     input_file_id: str
     metadata: Optional[Dict[str, str]]
     extra_headers: Optional[Dict[str, str]]