working create_batch

This commit is contained in:
Ishaan Jaff 2024-05-28 15:45:23 -07:00
parent 4dc7bfebd4
commit fc4ca265b8
4 changed files with 28 additions and 12 deletions

View file

@@ -26,6 +26,7 @@ from ..types.llms.openai import (
CreateFileRequest, CreateFileRequest,
FileTypes, FileTypes,
FileObject, FileObject,
Batch,
) )
from typing import Literal, Optional, Dict from typing import Literal, Optional, Dict
@@ -44,6 +45,11 @@ def create_file(
extra_body: Optional[Dict[str, str]] = None, extra_body: Optional[Dict[str, str]] = None,
**kwargs, **kwargs,
) -> FileObject: ) -> FileObject:
"""
Files are used to upload documents that can be used with features like Assistants, Fine-tuning, and Batch API.
LiteLLM Equivalent of POST: POST https://api.openai.com/v1/files
"""
try: try:
optional_params = GenericLiteLLMParams(**kwargs) optional_params = GenericLiteLLMParams(**kwargs)
if custom_llm_provider == "openai": if custom_llm_provider == "openai":
@@ -127,7 +133,7 @@ def create_batch(
extra_headers: Optional[Dict[str, str]] = None, extra_headers: Optional[Dict[str, str]] = None,
extra_body: Optional[Dict[str, str]] = None, extra_body: Optional[Dict[str, str]] = None,
**kwargs, **kwargs,
): ) -> Batch:
""" """
Creates and executes a batch from an uploaded file of request Creates and executes a batch from an uploaded file of request

View file

@@ -1604,7 +1604,7 @@ class OpenAIBatchesAPI(BaseLLM):
max_retries: Optional[int], max_retries: Optional[int],
organization: Optional[str], organization: Optional[str],
client: Optional[OpenAI] = None, client: Optional[OpenAI] = None,
): ) -> Batch:
openai_client: OpenAI = self.get_openai_client( openai_client: OpenAI = self.get_openai_client(
api_key=api_key, api_key=api_key,
api_base=api_base, api_base=api_base,

View file

@@ -20,6 +20,7 @@ def test_create_batch():
""" """
1. Create File for Batch completion 1. Create File for Batch completion
2. Create Batch Request 2. Create Batch Request
3. Retrieve the specific batch
""" """
file_obj = litellm.create_file( file_obj = litellm.create_file(
file=open("openai_batch_completions.jsonl", "rb"), file=open("openai_batch_completions.jsonl", "rb"),
@@ -33,16 +34,25 @@ def test_create_batch():
batch_input_file_id is not None batch_input_file_id is not None
), "Failed to create file, expected a non null file_id but got {batch_input_file_id}" ), "Failed to create file, expected a non null file_id but got {batch_input_file_id}"
print("response from creating file=", file_obj) response = litellm.create_batch(
# response = create_batch( completion_window="24h",
# completion_window="24h", endpoint="/v1/chat/completions",
# endpoint="/v1/chat/completions", input_file_id=batch_input_file_id,
# input_file_id="1", custom_llm_provider="openai",
# custom_llm_provider="openai", metadata={"key1": "value1", "key2": "value2"},
# metadata={"key1": "value1", "key2": "value2"}, )
# )
print("response") print("response from litellm.create_batch=", response)
assert (
response.id is not None
), f"Failed to create batch, expected a non null batch_id but got {response.id}"
assert (
response.endpoint == "/v1/chat/completions"
), f"Failed to create batch, expected endpoint to be /v1/chat/completions but got {response.endpoint}"
assert (
response.input_file_id == batch_input_file_id
), f"Failed to create batch, expected input_file_id to be {batch_input_file_id} but got {response.input_file_id}"
pass pass

View file

@@ -19,7 +19,7 @@ from openai.types.beta.threads.run import Run
from openai.types.beta.assistant import Assistant from openai.types.beta.assistant import Assistant
from openai.pagination import SyncCursorPage from openai.pagination import SyncCursorPage
from os import PathLike from os import PathLike
from openai.types import FileObject from openai.types import FileObject, Batch
from typing import TypedDict, List, Optional, Tuple, Mapping, IO from typing import TypedDict, List, Optional, Tuple, Mapping, IO