fix(managed_files.py): retrieve file content support

Supports the OpenAI retrieve file content API (GET /files/{file_id}/content) through the proxy.
Krrish Dholakia 2025-04-11 15:51:47 -07:00
parent 522ffd6e7c
commit 2ec2151537
3 changed files with 80 additions and 4 deletions
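
For context, a minimal sketch of how a client exercises this path once the proxy is running, assuming an OpenAI-compatible client pointed at a LiteLLM proxy (the base URL, key, and file id below are placeholders):

from openai import OpenAI

# Point the standard OpenAI client at the LiteLLM proxy (assumed local deployment).
client = OpenAI(
    base_url="http://localhost:4000/v1",  # placeholder proxy address
    api_key="sk-1234",                    # placeholder proxy virtual key
)

# GET /files/{file_id}/content -> handled by get_file_content() below.
# A base64-encoded unified (managed) file id is resolved via the managed_files
# hook; any other id falls through to litellm.afile_content() as before.
content = client.files.content(file_id="file-abc123")
print(content.read())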


@@ -399,6 +399,7 @@ async def get_file_content(
     from litellm.proxy.proxy_server import (
         add_litellm_data_to_request,
         general_settings,
+        llm_router,
         proxy_config,
         proxy_logging_obj,
         version,
@@ -421,9 +422,40 @@ async def get_file_content(
         or await get_custom_llm_provider_from_request_body(request=request)
         or "openai"
     )
-    response = await litellm.afile_content(
-        custom_llm_provider=custom_llm_provider, file_id=file_id, **data # type: ignore
+    ## check if file_id is a litellm managed file
+    is_base64_unified_file_id = (
+        _PROXY_LiteLLMManagedFiles._is_base64_encoded_unified_file_id(file_id)
     )
+
+    if is_base64_unified_file_id:
+        managed_files_obj = cast(
+            Optional[_PROXY_LiteLLMManagedFiles],
+            proxy_logging_obj.get_proxy_hook("managed_files"),
+        )
+        if managed_files_obj is None:
+            raise ProxyException(
+                message="Managed files hook not found",
+                type="None",
+                param="None",
+                code=500,
+            )
+        if llm_router is None:
+            raise ProxyException(
+                message="LLM Router not found",
+                type="None",
+                param="None",
+                code=500,
+            )
+        response = await managed_files_obj.afile_content(
+            file_id=file_id,
+            litellm_parent_otel_span=user_api_key_dict.parent_otel_span,
+            llm_router=llm_router,
+            **data,
+        )
+    else:
+        response = await litellm.afile_content(
+            custom_llm_provider=custom_llm_provider, file_id=file_id, **data # type: ignore
+        )

     ### ALERTING ###
     asyncio.create_task(
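
The branch above hinges on _PROXY_LiteLLMManagedFiles._is_base64_encoded_unified_file_id. Its real implementation lives in managed_files.py and is not shown in this diff; the sketch below only illustrates the kind of check involved, and the "litellm_proxy" marker is an assumption for illustration, not the library's confirmed format:

import base64
from typing import Optional

def is_base64_unified_file_id(file_id: str) -> Optional[str]:
    # Hypothetical sketch: a managed file id is valid base64 whose decoded
    # payload carries a LiteLLM marker ("litellm_proxy" is assumed here).
    try:
        decoded = base64.urlsafe_b64decode(file_id.encode("utf-8")).decode("utf-8")
    except Exception:
        return None  # not base64 -> treat as a plain provider file id
    return decoded if decoded.startswith("litellm_proxy") else None

Provider-issued ids such as "file-abc123" fail the decode or marker check, so they keep taking the original litellm.afile_content path.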