diff --git a/litellm/files/main.py b/litellm/files/main.py index b3fbd775f6..2440f6d180 100644 --- a/litellm/files/main.py +++ b/litellm/files/main.py @@ -198,7 +198,7 @@ async def afile_delete( def file_delete( file_id: str, - custom_llm_provider: Literal["openai"] = "openai", + custom_llm_provider: Literal["openai", "azure"] = "openai", extra_headers: Optional[Dict[str, str]] = None, extra_body: Optional[Dict[str, str]] = None, **kwargs, @@ -210,6 +210,22 @@ def file_delete( """ try: optional_params = GenericLiteLLMParams(**kwargs) + ### TIMEOUT LOGIC ### + timeout = optional_params.timeout or kwargs.get("request_timeout", 600) or 600 + # set timeout for 10 minutes by default + + if ( + timeout is not None + and isinstance(timeout, httpx.Timeout) + and supports_httpx_timeout(custom_llm_provider) == False + ): + read_timeout = timeout.read or 600 + timeout = read_timeout # default 10 min timeout + elif timeout is not None and not isinstance(timeout, httpx.Timeout): + timeout = float(timeout) # type: ignore + elif timeout is None: + timeout = 600.0 + _is_async = kwargs.pop("is_async", False) is True if custom_llm_provider == "openai": # for deepinfra/perplexity/anyscale/groq we check in get_llm_provider and pass in the api base from there api_base = ( @@ -231,26 +247,6 @@ def file_delete( or litellm.openai_key or os.getenv("OPENAI_API_KEY") ) - ### TIMEOUT LOGIC ### - timeout = ( - optional_params.timeout or kwargs.get("request_timeout", 600) or 600 - ) - # set timeout for 10 minutes by default - - if ( - timeout is not None - and isinstance(timeout, httpx.Timeout) - and supports_httpx_timeout(custom_llm_provider) == False - ): - read_timeout = timeout.read or 600 - timeout = read_timeout # default 10 min timeout - elif timeout is not None and not isinstance(timeout, httpx.Timeout): - timeout = float(timeout) # type: ignore - elif timeout is None: - timeout = 600.0 - - _is_async = kwargs.pop("is_async", False) is True - response = openai_files_instance.delete_file( 
file_id=file_id, _is_async=_is_async, @@ -260,6 +256,38 @@ def file_delete( max_retries=optional_params.max_retries, organization=organization, ) + elif custom_llm_provider == "azure": + api_base = optional_params.api_base or litellm.api_base or get_secret("AZURE_API_BASE") # type: ignore + api_version = ( + optional_params.api_version + or litellm.api_version + or get_secret("AZURE_API_VERSION") + ) # type: ignore + + api_key = ( + optional_params.api_key + or litellm.api_key + or litellm.azure_key + or get_secret("AZURE_OPENAI_API_KEY") + or get_secret("AZURE_API_KEY") + ) # type: ignore + + extra_body = optional_params.get("extra_body", {}) + azure_ad_token: Optional[str] = None + if extra_body is not None: + azure_ad_token = extra_body.pop("azure_ad_token", None) + if azure_ad_token is None: + azure_ad_token = get_secret("AZURE_AD_TOKEN") # type: ignore + + response = azure_files_instance.delete_file( + _is_async=_is_async, + api_base=api_base, + api_key=api_key, + api_version=api_version, + timeout=timeout, + max_retries=optional_params.max_retries, + file_id=file_id, + ) else: raise litellm.exceptions.BadRequestError( message="LiteLLM doesn't support {} for 'create_batch'. 
Only 'openai' is supported.".format( diff --git a/litellm/llms/files_apis/azure.py b/litellm/llms/files_apis/azure.py index d46d9225a4..c4c9ee48af 100644 --- a/litellm/llms/files_apis/azure.py +++ b/litellm/llms/files_apis/azure.py @@ -223,7 +223,7 @@ class AzureOpenAIFilesAPI(BaseLLM): api_key: Optional[str], timeout: Union[float, httpx.Timeout], max_retries: Optional[int], - organization: Optional[str], + organization: Optional[str] = None, api_version: Optional[str] = None, client: Optional[Union[AzureOpenAI, AsyncAzureOpenAI]] = None, ): diff --git a/litellm/tests/test_fine_tuning_api.py b/litellm/tests/test_fine_tuning_api.py index 1f99d63582..f2bb73efde 100644 --- a/litellm/tests/test_fine_tuning_api.py +++ b/litellm/tests/test_fine_tuning_api.py @@ -122,20 +122,11 @@ async def test_azure_create_fine_tune_jobs_async(): _current_dir = os.path.dirname(os.path.abspath(__file__)) file_path = os.path.join(_current_dir, file_name) - file_obj = await litellm.acreate_file( - file=open(file_path, "rb"), - purpose="fine-tune", - custom_llm_provider="azure", - api_key=os.getenv("AZURE_SWEDEN_API_KEY"), - api_base="https://my-endpoint-sweden-berri992.openai.azure.com/", - ) - print("Response from creating file=", file_obj) - - await asyncio.sleep(5) + file_id = "file-5e4b20ecbd724182b9964f3cd2ab7212" create_fine_tuning_response = await litellm.acreate_fine_tuning_job( model="gpt-35-turbo-1106", - training_file=file_obj.id, + training_file=file_id, custom_llm_provider="azure", api_key=os.getenv("AZURE_SWEDEN_API_KEY"), api_base="https://my-endpoint-sweden-berri992.openai.azure.com/", @@ -156,12 +147,6 @@ async def test_azure_create_fine_tune_jobs_async(): ) print("response from litellm.list_fine_tuning_jobs=", ft_jobs) - # # delete file - - # await litellm.afile_delete( - # file_id=file_obj.id, - # ) - # cancel ft job response = await litellm.acancel_fine_tuning_job( fine_tuning_job_id=create_fine_tuning_response.id,