mirror of
https://github.com/BerriAI/litellm.git
synced 2025-04-25 18:54:30 +00:00
fix(managed_files.py): support for DELETE endpoint for files
This commit is contained in:
parent
cbcf028da5
commit
522ffd6e7c
3 changed files with 90 additions and 8 deletions
|
@@ -565,7 +565,8 @@ async def get_file(
|
|||
code=500,
|
||||
)
|
||||
response = await managed_files_obj.afile_retrieve(
|
||||
file_id=file_id, litellm_parent_otel_span=user_api_key_dict.parent_otel_span, **data # type: ignore
|
||||
file_id=file_id,
|
||||
litellm_parent_otel_span=user_api_key_dict.parent_otel_span,
|
||||
)
|
||||
else:
|
||||
response = await litellm.afile_retrieve(
|
||||
|
@@ -663,6 +664,7 @@ async def delete_file(
|
|||
from litellm.proxy.proxy_server import (
|
||||
add_litellm_data_to_request,
|
||||
general_settings,
|
||||
llm_router,
|
||||
proxy_config,
|
||||
proxy_logging_obj,
|
||||
version,
|
||||
|
@@ -685,10 +687,41 @@ async def delete_file(
|
|||
proxy_config=proxy_config,
|
||||
)
|
||||
|
||||
response = await litellm.afile_delete(
|
||||
custom_llm_provider=custom_llm_provider, file_id=file_id, **data # type: ignore
|
||||
## check if file_id is a litellm managed file
|
||||
is_base64_unified_file_id = (
|
||||
_PROXY_LiteLLMManagedFiles._is_base64_encoded_unified_file_id(file_id)
|
||||
)
|
||||
|
||||
if is_base64_unified_file_id:
|
||||
managed_files_obj = cast(
|
||||
Optional[_PROXY_LiteLLMManagedFiles],
|
||||
proxy_logging_obj.get_proxy_hook("managed_files"),
|
||||
)
|
||||
if managed_files_obj is None:
|
||||
raise ProxyException(
|
||||
message="Managed files hook not found",
|
||||
type="None",
|
||||
param="None",
|
||||
code=500,
|
||||
)
|
||||
if llm_router is None:
|
||||
raise ProxyException(
|
||||
message="LLM Router not found",
|
||||
type="None",
|
||||
param="None",
|
||||
code=500,
|
||||
)
|
||||
response = await managed_files_obj.afile_delete(
|
||||
file_id=file_id,
|
||||
litellm_parent_otel_span=user_api_key_dict.parent_otel_span,
|
||||
llm_router=llm_router,
|
||||
**data,
|
||||
)
|
||||
else:
|
||||
response = await litellm.afile_delete(
|
||||
custom_llm_provider=custom_llm_provider, file_id=file_id, **data # type: ignore
|
||||
)
|
||||
|
||||
### ALERTING ###
|
||||
asyncio.create_task(
|
||||
proxy_logging_obj.update_request_status(
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue