Support CRUD endpoints for Managed Files (#9924)

* fix(openai.py): ensure openai file object shows up in logs

* fix(managed_files.py): return unified file id as b64 str

allows file retrieval by id to work as expected (see the base64 sketch after this list)

* fix(managed_files.py): apply decoded file id transformation

* fix: add unit test for file id + decode logic

* fix: initial commit for litellm_proxy support with CRUD endpoints

* fix(managed_files.py): support retrieve file operation

* fix(managed_files.py): support for DELETE endpoint for files

* fix(managed_files.py): retrieve file content support

supports OpenAI's retrieve file content API

* fix: fix linting error

* test: update tests

* fix: fix linting error

* fix(files/main.py): pass litellm params to azure route

* test: fix test
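
The unified file id bullets above boil down to a base64 round trip. A minimal sketch, with hypothetical helper names (`encode_unified_file_id` / `decode_unified_file_id` are illustrative, not the actual functions in managed_files.py):

```python
import base64

def encode_unified_file_id(unified_id: str) -> str:
    # Hand the client a URL-safe base64 string so the internal unified id
    # looks like an opaque, OpenAI-style file id.
    return base64.urlsafe_b64encode(unified_id.encode("utf-8")).decode("utf-8")

def decode_unified_file_id(b64_id: str) -> str:
    # Reverse transformation, applied before retrieve/delete/content lookups.
    return base64.urlsafe_b64decode(b64_id.encode("utf-8")).decode("utf-8")

# Round trip, mirroring what the unit-test bullet above checks:
assert decode_unified_file_id(encode_unified_file_id("litellm_proxy/file-abc")) == "litellm_proxy/file-abc"
```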
Krish Dholakia, 2025-04-11 21:48:27 -07:00, committed by GitHub
parent 7bdbf3bbb6 · commit c5c7c117af
14 changed files with 783 additions and 86 deletions

litellm/router.py

@@ -729,6 +729,12 @@ class Router:
         self.aresponses = self.factory_function(
             litellm.aresponses, call_type="aresponses"
         )
+        self.afile_delete = self.factory_function(
+            litellm.afile_delete, call_type="afile_delete"
+        )
+        self.afile_content = self.factory_function(
+            litellm.afile_content, call_type="afile_content"
+        )
         self.responses = self.factory_function(litellm.responses, call_type="responses")
 
     def validate_fallbacks(self, fallback_param: Optional[List]):
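
For orientation, a rough usage sketch of the two router methods this hunk registers; the model list and file id are made up, and the exact call signatures are whatever litellm.afile_delete / litellm.afile_content accept:

```python
import asyncio
from litellm import Router

async def main():
    router = Router(
        model_list=[
            {
                "model_name": "gpt-4o",
                "litellm_params": {"model": "azure/gpt-4o"},  # made-up deployment
            }
        ]
    )
    # Both factory-generated wrappers funnel through
    # _ageneric_api_call_with_fallbacks, as the later hunks show.
    await router.afile_delete(model="gpt-4o", file_id="file-abc123")
    content = await router.afile_content(model="gpt-4o", file_id="file-abc123")

asyncio.run(main())
```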
@@ -2435,6 +2441,8 @@ class Router:
             model_name = data["model"]
             self.total_calls[model_name] += 1
+            ### get custom
             response = original_function(
                 **{
                     **data,
@@ -2514,9 +2522,15 @@ class Router:
             # Perform pre-call checks for routing strategy
             self.routing_strategy_pre_call_checks(deployment=deployment)
+            try:
+                _, custom_llm_provider, _, _ = get_llm_provider(model=data["model"])
+            except Exception:
+                custom_llm_provider = None
             response = original_function(
                 **{
                     **data,
+                    "custom_llm_provider": custom_llm_provider,
                     "caching": self.cache_responses,
                     **kwargs,
                 }
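
For context on the try/except above: get_llm_provider returns a 4-tuple, and only the provider component is needed here, so a model string that fails to parse degrades to custom_llm_provider=None instead of failing the request. A quick illustration:

```python
from litellm import get_llm_provider

# Returns (model, custom_llm_provider, dynamic_api_key, api_base);
# a deployment string like "azure/my-gpt-4o" resolves to the "azure" provider.
model, custom_llm_provider, _, _ = get_llm_provider(model="azure/my-gpt-4o")
print(custom_llm_provider)  # "azure"
```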
@@ -3058,6 +3072,8 @@ class Router:
             "anthropic_messages",
             "aresponses",
             "responses",
+            "afile_delete",
+            "afile_content",
         ] = "assistants",
     ):
         """
@@ -3102,11 +3118,21 @@ class Router:
                 return await self._pass_through_moderation_endpoint_factory(
                     original_function=original_function, **kwargs
                 )
-            elif call_type in ("anthropic_messages", "aresponses"):
+            elif call_type in (
+                "anthropic_messages",
+                "aresponses",
+            ):
                 return await self._ageneric_api_call_with_fallbacks(
                     original_function=original_function,
                     **kwargs,
                 )
+            elif call_type in ("afile_delete", "afile_content"):
+                return await self._ageneric_api_call_with_fallbacks(
+                    original_function=original_function,
+                    custom_llm_provider=custom_llm_provider,
+                    client=client,
+                    **kwargs,
+                )
 
         return async_wrapper
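
End to end, the point of the new call_type branches is that standard OpenAI file CRUD calls work against the proxy. A hedged client-side sketch (base_url, api key, and file name are placeholders for a running LiteLLM proxy):

```python
from openai import OpenAI

client = OpenAI(base_url="http://localhost:4000/v1", api_key="sk-1234")  # placeholders

f = client.files.create(file=open("data.jsonl", "rb"), purpose="batch")
retrieved = client.files.retrieve(f.id)  # retrieve file
content = client.files.content(f.id)     # retrieve file content
client.files.delete(f.id)                # DELETE endpoint for files
```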