diff --git a/litellm/fine_tuning/main.py b/litellm/fine_tuning/main.py
index 6fa2bd9fba..de899fe4bd 100644
--- a/litellm/fine_tuning/main.py
+++ b/litellm/fine_tuning/main.py
@@ -34,7 +34,7 @@ openai_fine_tuning_instance = OpenAIFineTuningAPI()
 async def acreate_fine_tuning_job(
     model: str,
     training_file: str,
-    hyperparameters: Optional[Hyperparameters] = None,
+    hyperparameters: Optional[Hyperparameters] = {},
     suffix: Optional[str] = None,
     validation_file: Optional[str] = None,
     integrations: Optional[List[str]] = None,
@@ -85,7 +85,7 @@ async def acreate_fine_tuning_job(
 def create_fine_tuning_job(
     model: str,
     training_file: str,
-    hyperparameters: Optional[Hyperparameters] = None,
+    hyperparameters: Optional[Hyperparameters] = {},
     suffix: Optional[str] = None,
     validation_file: Optional[str] = None,
     integrations: Optional[List[str]] = None,
diff --git a/litellm/llms/openai_fine_tuning/openai.py b/litellm/llms/openai_fine_tuning/openai.py
new file mode 100644
index 0000000000..c964108328
--- /dev/null
+++ b/litellm/llms/openai_fine_tuning/openai.py
@@ -0,0 +1,96 @@
+from typing import Any, Coroutine, Optional, Union
+
+import httpx
+from openai import AsyncOpenAI, OpenAI
+from openai.types.fine_tuning import FineTuningJob
+
+from litellm._logging import verbose_logger
+from litellm.llms.base import BaseLLM
+from litellm.types.llms.openai import FineTuningJobCreate
+
+
+class OpenAIFineTuningAPI(BaseLLM):
+    """
+    OpenAI methods to support fine tuning jobs
+    """
+
+    def __init__(self) -> None:
+        super().__init__()
+
+    def get_openai_client(
+        self,
+        api_key: Optional[str],
+        api_base: Optional[str],
+        timeout: Union[float, httpx.Timeout],
+        max_retries: Optional[int],
+        organization: Optional[str],
+        client: Optional[Union[OpenAI, AsyncOpenAI]] = None,
+        _is_async: bool = False,
+    ) -> Optional[Union[OpenAI, AsyncOpenAI]]:
+        # Reuse a caller-supplied client when given; otherwise build one
+        # from the non-None keyword args (api_base is the SDK's base_url).
+        received_args = locals()
+        openai_client: Optional[Union[OpenAI, AsyncOpenAI]] = None
+        if client is None:
+            data = {}
+            for k, v in received_args.items():
+                if k == "self" or k == "client" or k == "_is_async":
+                    pass
+                elif k == "api_base" and v is not None:
+                    data["base_url"] = v
+                elif v is not None:
+                    data[k] = v
+            if _is_async is True:
+                openai_client = AsyncOpenAI(**data)
+            else:
+                openai_client = OpenAI(**data)  # type: ignore
+        else:
+            openai_client = client
+
+        return openai_client
+
+    async def acreate_fine_tuning_job(
+        self,
+        create_fine_tuning_job_data: FineTuningJobCreate,
+        openai_client: AsyncOpenAI,
+    ) -> FineTuningJob:
+        response = await openai_client.fine_tuning.jobs.create(**create_fine_tuning_job_data)
+        return response
+
+    def create_fine_tuning_job(
+        self,
+        _is_async: bool,
+        create_fine_tuning_job_data: FineTuningJobCreate,
+        api_key: Optional[str],
+        api_base: Optional[str],
+        timeout: Union[float, httpx.Timeout],
+        max_retries: Optional[int],
+        organization: Optional[str],
+        client: Optional[Union[OpenAI, AsyncOpenAI]] = None,
+    ) -> Union[FineTuningJob, Coroutine[Any, Any, FineTuningJob]]:
+        openai_client: Optional[Union[OpenAI, AsyncOpenAI]] = self.get_openai_client(
+            api_key=api_key,
+            api_base=api_base,
+            timeout=timeout,
+            max_retries=max_retries,
+            organization=organization,
+            client=client,
+            _is_async=_is_async,
+        )
+        if openai_client is None:
+            raise ValueError(
+                "OpenAI client is not initialized. Make sure api_key is passed or OPENAI_API_KEY is set in the environment."
+            )
+
+        if _is_async is True:
+            if not isinstance(openai_client, AsyncOpenAI):
+                raise ValueError(
+                    "OpenAI client is not an instance of AsyncOpenAI. Make sure you passed an AsyncOpenAI client."
+                )
+            return self.acreate_fine_tuning_job(  # type: ignore
+                create_fine_tuning_job_data=create_fine_tuning_job_data,
+                openai_client=openai_client,
+            )
+        verbose_logger.debug(
+            "creating fine tuning job, args= %s", create_fine_tuning_job_data
+        )
+        response = openai_client.fine_tuning.jobs.create(**create_fine_tuning_job_data)
+        return response