From cb300d30a95c7af0c6e5b1af80032a219628a820 Mon Sep 17 00:00:00 2001
From: Ishaan Jaff
Date: Wed, 10 Jul 2024 16:08:58 -0700
Subject: [PATCH] add file delete path

---
 .../openai_files_endpoints/files_endpoints.py | 109 ++++++++++++++++++
 1 file changed, 109 insertions(+)

diff --git a/litellm/proxy/openai_files_endpoints/files_endpoints.py b/litellm/proxy/openai_files_endpoints/files_endpoints.py
index 073bf4003..36972972b 100644
--- a/litellm/proxy/openai_files_endpoints/files_endpoints.py
+++ b/litellm/proxy/openai_files_endpoints/files_endpoints.py
@@ -265,6 +265,115 @@ async def get_file(
     )


+@router.delete(
+    "/v1/files/{file_id:path}",
+    dependencies=[Depends(user_api_key_auth)],
+    tags=["files"],
+)
+@router.delete(
+    "/files/{file_id:path}",
+    dependencies=[Depends(user_api_key_auth)],
+    tags=["files"],
+)
+async def delete_file(
+    request: Request,
+    fastapi_response: Response,
+    file_id: str,
+    user_api_key_dict: UserAPIKeyAuth = Depends(user_api_key_auth),
+):
+    """
+    Deletes a specified file that can be used across the Assistants API and Batch API.
+    This is the equivalent of DELETE https://api.openai.com/v1/files/{file_id}
+
+    Supports identical params as: https://platform.openai.com/docs/api-reference/files/delete
+
+    Example Curl
+    ```
+    curl http://localhost:4000/v1/files/file-abc123 \
+    -X DELETE \
+    -H "Authorization: Bearer $OPENAI_API_KEY"
+
+    ```
+    """
+    from litellm.proxy.proxy_server import (
+        add_litellm_data_to_request,
+        general_settings,
+        get_custom_headers,
+        proxy_config,
+        proxy_logging_obj,
+        version,
+    )
+
+    data: Dict = {}
+    try:
+
+        # Include original request and headers in the data
+        data = await add_litellm_data_to_request(
+            data=data,
+            request=request,
+            general_settings=general_settings,
+            user_api_key_dict=user_api_key_dict,
+            version=version,
+            proxy_config=proxy_config,
+        )
+
+        # for now use custom_llm_provider=="openai" -> this will change as LiteLLM adds more providers for afile_delete
+        response = await litellm.afile_delete(
+            custom_llm_provider="openai", file_id=file_id, **data
+        )
+
+        ### ALERTING ###
+        asyncio.create_task(
+            proxy_logging_obj.update_request_status(
+                litellm_call_id=data.get("litellm_call_id", ""), status="success"
+            )
+        )
+
+        ### RESPONSE HEADERS ###
+        hidden_params = getattr(response, "_hidden_params", {}) or {}
+        model_id = hidden_params.get("model_id", None) or ""
+        cache_key = hidden_params.get("cache_key", None) or ""
+        api_base = hidden_params.get("api_base", None) or ""
+
+        fastapi_response.headers.update(
+            get_custom_headers(
+                user_api_key_dict=user_api_key_dict,
+                model_id=model_id,
+                cache_key=cache_key,
+                api_base=api_base,
+                version=version,
+                model_region=getattr(user_api_key_dict, "allowed_model_region", ""),
+            )
+        )
+        return response
+
+    except Exception as e:
+        await proxy_logging_obj.post_call_failure_hook(
+            user_api_key_dict=user_api_key_dict, original_exception=e, request_data=data
+        )
+        verbose_proxy_logger.error(
+            "litellm.proxy.proxy_server.delete_file(): Exception occurred - {}".format(
+                str(e)
+            )
+        )
+        verbose_proxy_logger.debug(traceback.format_exc())
+        if isinstance(e, HTTPException):
+            raise ProxyException(
+                message=getattr(e, "message", str(e.detail)),
+                type=getattr(e, "type", "None"),
+                param=getattr(e, "param", "None"),
+                code=getattr(e, "status_code", status.HTTP_400_BAD_REQUEST),
+            )
+        else:
+            error_msg = f"{str(e)}"
+            raise ProxyException(
+                message=getattr(e, "message", error_msg),
+                type=getattr(e, "type", "None"),
+                param=getattr(e, "param", "None"),
+                code=getattr(e, "status_code", 500),
+            )
+
+
 @router.get(
     "/v1/files",
     dependencies=[Depends(user_api_key_auth)],