add cancel endpoint

This commit is contained in:
Ishaan Jaff 2024-07-31 16:12:36 -07:00
parent 9b6231810b
commit 5f1070e47f

@@ -292,3 +292,103 @@ async def list_fine_tuning_jobs(
            param=getattr(e, "param", "None"),
            code=getattr(e, "status_code", 500),
        )
@router.post(
    "/v1/fine_tuning/jobs/{fine_tuning_job_id:path}/cancel",
    dependencies=[Depends(user_api_key_auth)],
    tags=["fine-tuning"],
)
async def cancel_fine_tuning_job(
    request: Request,
    fastapi_response: Response,
    custom_llm_provider: Literal["openai", "azure"],
    fine_tuning_job_id: str,
    user_api_key_dict: dict = Depends(user_api_key_auth),
):
"""
Cancel a fine-tuning job.
This is the equivalent of POST https://api.openai.com/v1/fine_tuning/jobs/{fine_tuning_job_id}/cancel
Supported Query Params:
- `custom_llm_provider`: Name of the LiteLLM provider
- `fine_tuning_job_id`: The ID of the fine-tuning job to cancel.
"""
    from litellm.proxy.proxy_server import (
        add_litellm_data_to_request,
        general_settings,
        get_custom_headers,
        proxy_config,
        proxy_logging_obj,
        version,
    )

    data: dict = {}
    try:
        # Include original request and headers in the data
        data = await add_litellm_data_to_request(
            data=data,
            request=request,
            general_settings=general_settings,
            user_api_key_dict=user_api_key_dict,
            version=version,
            proxy_config=proxy_config,
        )

        # get configs for custom_llm_provider
        llm_provider_config = get_fine_tuning_provider_config(
            custom_llm_provider=custom_llm_provider
        )

        data.update(llm_provider_config)

        response = await litellm.acancel_fine_tuning_job(
            **data,
            fine_tuning_job_id=fine_tuning_job_id,
        )
        ### RESPONSE HEADERS ###
        hidden_params = getattr(response, "_hidden_params", {}) or {}
        model_id = hidden_params.get("model_id", None) or ""
        cache_key = hidden_params.get("cache_key", None) or ""
        api_base = hidden_params.get("api_base", None) or ""

        fastapi_response.headers.update(
            get_custom_headers(
                user_api_key_dict=user_api_key_dict,
                model_id=model_id,
                cache_key=cache_key,
                api_base=api_base,
                version=version,
                model_region=getattr(user_api_key_dict, "allowed_model_region", ""),
            )
        )

        return response
    except Exception as e:
        await proxy_logging_obj.post_call_failure_hook(
            user_api_key_dict=user_api_key_dict, original_exception=e, request_data=data
        )
        verbose_proxy_logger.error(
            "litellm.proxy.proxy_server.cancel_fine_tuning_job(): Exception occurred - {}".format(
                str(e)
            )
        )
        verbose_proxy_logger.debug(traceback.format_exc())
        if isinstance(e, HTTPException):
            raise ProxyException(
                message=getattr(e, "message", str(e.detail)),
                type=getattr(e, "type", "None"),
                param=getattr(e, "param", "None"),
                code=getattr(e, "status_code", status.HTTP_400_BAD_REQUEST),
            )
        else:
            error_msg = f"{str(e)}"
            raise ProxyException(
                message=getattr(e, "message", error_msg),
                type=getattr(e, "type", "None"),
                param=getattr(e, "param", "None"),
                code=getattr(e, "status_code", 500),
            )
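
For reference, a minimal sketch of calling the new route through a running proxy. The base URL, virtual key, and job ID below are all placeholder assumptions, not values from this commit:

# Minimal sketch: cancel a fine-tuning job via the new proxy route.
# PROXY_BASE, API_KEY, and job_id are illustrative placeholders.
import requests

PROXY_BASE = "http://localhost:4000"  # assumed proxy address
API_KEY = "sk-1234"                   # assumed proxy virtual key
job_id = "ftjob-abc123"               # illustrative fine-tuning job ID

response = requests.post(
    f"{PROXY_BASE}/v1/fine_tuning/jobs/{job_id}/cancel",
    headers={"Authorization": f"Bearer {API_KEY}"},
    # custom_llm_provider is a required query param on this route
    params={"custom_llm_provider": "openai"},
)
response.raise_for_status()
print(response.json())  # the cancelled job object, as returned by the provider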