Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-26 11:14:04 +00:00
(Feat) - new endpoint GET /v1/fine_tuning/jobs/{fine_tuning_job_id:path} (#7427)
* init commit ft jobs logging
* add ft logging
* add logging for FineTuningJob
* simple FT Job create test
* simplify Azure fine tuning to use all methods in OAI ft
* update doc string
* add aretrieve_fine_tuning_job
* reuse handle_exception_on_proxy from litellm.proxy.utils
* fix naming
* add /fine_tuning/jobs/{fine_tuning_job_id:path}
* remove unused imports
* update func signature
* run ci/cd again
* ci/cd run again
* fix code quality
* ci/cd run again
This commit is contained in:
parent 5e8c64f128
commit 2ece919f01
5 changed files with 400 additions and 227 deletions
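The headline change is a new proxy route, GET /v1/fine_tuning/jobs/{fine_tuning_job_id:path}, for retrieving a single fine-tuning job. Because the LiteLLM proxy mirrors the OpenAI REST surface, the stock OpenAI Python client pointed at a running proxy should be able to call it; the base URL, API key, and job ID below are illustrative placeholders, not values from this commit.

from openai import OpenAI

# Point the stock OpenAI client at a LiteLLM proxy (placeholder URL and key).
client = OpenAI(base_url="http://localhost:4000/v1", api_key="sk-1234")

# Issues GET /v1/fine_tuning/jobs/{fine_tuning_job_id} against the proxy
# and returns the job's status, hyperparameters, result files, etc.
job = client.fine_tuning.jobs.retrieve("ftjob-abc123")
print(job.status)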
@@ -1,179 +1,48 @@
-from typing import Any, Coroutine, Optional, Union
+from typing import Optional, Union
 
 import httpx
-from openai import AsyncAzureOpenAI, AzureOpenAI
-from openai.types.fine_tuning import FineTuningJob
+from openai import AsyncAzureOpenAI, AsyncOpenAI, AzureOpenAI, OpenAI
 
-from litellm._logging import verbose_logger
 from litellm.llms.azure.files.handler import get_azure_openai_client
-from litellm.llms.base import BaseLLM
+from litellm.llms.openai.fine_tuning.handler import OpenAIFineTuningAPI
 
 
-class AzureOpenAIFineTuningAPI(BaseLLM):
+class AzureOpenAIFineTuningAPI(OpenAIFineTuningAPI):
     """
-    AzureOpenAI methods to support for batches
+    AzureOpenAI methods to support fine tuning, inherits from OpenAIFineTuningAPI.
     """
 
-    def __init__(self) -> None:
-        super().__init__()
-
-    async def acreate_fine_tuning_job(
+    def get_openai_client(
         self,
-        create_fine_tuning_job_data: dict,
-        openai_client: AsyncAzureOpenAI,
-    ) -> FineTuningJob:
-        response = await openai_client.fine_tuning.jobs.create(
-            **create_fine_tuning_job_data  # type: ignore
-        )
-        return response
-
-    def create_fine_tuning_job(
-        self,
-        _is_async: bool,
-        create_fine_tuning_job_data: dict,
         api_key: Optional[str],
         api_base: Optional[str],
         timeout: Union[float, httpx.Timeout],
         max_retries: Optional[int],
-        organization: Optional[str] = None,
-        client: Optional[Union[AzureOpenAI, AsyncAzureOpenAI]] = None,
+        organization: Optional[str],
+        client: Optional[
+            Union[OpenAI, AsyncOpenAI, AzureOpenAI, AsyncAzureOpenAI]
+        ] = None,
+        _is_async: bool = False,
         api_version: Optional[str] = None,
-    ) -> Union[FineTuningJob, Coroutine[Any, Any, FineTuningJob]]:
-        openai_client: Optional[Union[AzureOpenAI, AsyncAzureOpenAI]] = (
-            get_azure_openai_client(
-                api_key=api_key,
-                api_base=api_base,
-                timeout=timeout,
-                max_retries=max_retries,
-                organization=organization,
-                api_version=api_version,
-                client=client,
-                _is_async=_is_async,
-            )
-        )
+    ) -> Optional[
+        Union[
+            OpenAI,
+            AsyncOpenAI,
+            AzureOpenAI,
+            AsyncAzureOpenAI,
+        ]
+    ]:
+        # Override to use Azure-specific client initialization
+        if isinstance(client, OpenAI) or isinstance(client, AsyncOpenAI):
+            client = None
+
+        return get_azure_openai_client(
+            api_key=api_key,
+            api_base=api_base,
+            timeout=timeout,
+            max_retries=max_retries,
+            organization=organization,
+            api_version=api_version,
+            client=client,
+            _is_async=_is_async,
+        )
-        if openai_client is None:
-            raise ValueError(
-                "AzureOpenAI client is not initialized. Make sure api_key is passed or OPENAI_API_KEY is set in the environment."
-            )
-
-        if _is_async is True:
-            if not isinstance(openai_client, AsyncAzureOpenAI):
-                raise ValueError(
-                    "AzureOpenAI client is not an instance of AsyncAzureOpenAI. Make sure you passed an AsyncAzureOpenAI client."
-                )
-            return self.acreate_fine_tuning_job(  # type: ignore
-                create_fine_tuning_job_data=create_fine_tuning_job_data,
-                openai_client=openai_client,
-            )
-        verbose_logger.debug(
-            "creating fine tuning job, args= %s", create_fine_tuning_job_data
-        )
-        response = openai_client.fine_tuning.jobs.create(**create_fine_tuning_job_data)  # type: ignore
-        return response
-
-    async def acancel_fine_tuning_job(
-        self,
-        fine_tuning_job_id: str,
-        openai_client: AsyncAzureOpenAI,
-    ) -> FineTuningJob:
-        response = await openai_client.fine_tuning.jobs.cancel(
-            fine_tuning_job_id=fine_tuning_job_id
-        )
-        return response
-
-    def cancel_fine_tuning_job(
-        self,
-        _is_async: bool,
-        fine_tuning_job_id: str,
-        api_key: Optional[str],
-        api_base: Optional[str],
-        timeout: Union[float, httpx.Timeout],
-        max_retries: Optional[int],
-        organization: Optional[str] = None,
-        api_version: Optional[str] = None,
-        client: Optional[Union[AzureOpenAI, AsyncAzureOpenAI]] = None,
-    ):
-        openai_client: Optional[Union[AzureOpenAI, AsyncAzureOpenAI]] = (
-            get_azure_openai_client(
-                api_key=api_key,
-                api_base=api_base,
-                api_version=api_version,
-                timeout=timeout,
-                max_retries=max_retries,
-                organization=organization,
-                client=client,
-                _is_async=_is_async,
-            )
-        )
-        if openai_client is None:
-            raise ValueError(
-                "AzureOpenAI client is not initialized. Make sure api_key is passed or OPENAI_API_KEY is set in the environment."
-            )
-
-        if _is_async is True:
-            if not isinstance(openai_client, AsyncAzureOpenAI):
-                raise ValueError(
-                    "AzureOpenAI client is not an instance of AsyncAzureOpenAI. Make sure you passed an AsyncAzureOpenAI client."
-                )
-            return self.acancel_fine_tuning_job(  # type: ignore
-                fine_tuning_job_id=fine_tuning_job_id,
-                openai_client=openai_client,
-            )
-        verbose_logger.debug("canceling fine tuning job, args= %s", fine_tuning_job_id)
-        response = openai_client.fine_tuning.jobs.cancel(
-            fine_tuning_job_id=fine_tuning_job_id
-        )
-        return response
-
-    async def alist_fine_tuning_jobs(
-        self,
-        openai_client: AsyncAzureOpenAI,
-        after: Optional[str] = None,
-        limit: Optional[int] = None,
-    ):
-        response = await openai_client.fine_tuning.jobs.list(after=after, limit=limit)  # type: ignore
-        return response
-
-    def list_fine_tuning_jobs(
-        self,
-        _is_async: bool,
-        api_key: Optional[str],
-        api_base: Optional[str],
-        timeout: Union[float, httpx.Timeout],
-        max_retries: Optional[int],
-        organization: Optional[str] = None,
-        client: Optional[Union[AzureOpenAI, AsyncAzureOpenAI]] = None,
-        api_version: Optional[str] = None,
-        after: Optional[str] = None,
-        limit: Optional[int] = None,
-    ):
-        openai_client: Optional[Union[AzureOpenAI, AsyncAzureOpenAI]] = (
-            get_azure_openai_client(
-                api_key=api_key,
-                api_base=api_base,
-                api_version=api_version,
-                timeout=timeout,
-                max_retries=max_retries,
-                organization=organization,
-                client=client,
-                _is_async=_is_async,
-            )
-        )
-        if openai_client is None:
-            raise ValueError(
-                "AzureOpenAI client is not initialized. Make sure api_key is passed or OPENAI_API_KEY is set in the environment."
-            )
-
-        if _is_async is True:
-            if not isinstance(openai_client, AsyncAzureOpenAI):
-                raise ValueError(
-                    "AzureOpenAI client is not an instance of AsyncAzureOpenAI. Make sure you passed an AsyncAzureOpenAI client."
-                )
-            return self.alist_fine_tuning_jobs(  # type: ignore
-                after=after,
-                limit=limit,
-                openai_client=openai_client,
-            )
-        verbose_logger.debug("list fine tuning job, after= %s, limit= %s", after, limit)
-        response = openai_client.fine_tuning.jobs.list(after=after, limit=limit)  # type: ignore
-        return response
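After this refactor the Azure handler only supplies a client factory; the create/cancel/list/retrieve flows run in the parent OpenAIFineTuningAPI, which calls get_openai_client for each operation. A minimal sketch of exercising the override directly, assuming the class lives at litellm.llms.azure.fine_tuning.handler (inferred from the parent-class import in the diff) and using placeholder credentials:

import os

# Assumed module path, mirroring the OpenAI handler import shown in the diff.
from litellm.llms.azure.fine_tuning.handler import AzureOpenAIFineTuningAPI

azure_ft = AzureOpenAIFineTuningAPI()

# Any OpenAI/AsyncOpenAI client passed in is discarded and rebuilt as an Azure
# client via get_azure_openai_client(); the values below are placeholders.
azure_client = azure_ft.get_openai_client(
    api_key=os.getenv("AZURE_API_KEY"),
    api_base=os.getenv("AZURE_API_BASE"),
    timeout=600.0,
    max_retries=2,
    organization=None,
    client=None,
    _is_async=False,
    api_version=os.getenv("AZURE_API_VERSION"),
)
# azure_client is an AzureOpenAI instance (AsyncAzureOpenAI when _is_async=True).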