Litellm dev 04 10 2025 p3 (#9903)

* feat(managed_files.py): encode file type in unified file id

simplify calling gemini models

* fix(common_utils.py): fix extracting file type from unified file id

* fix(litellm_logging.py): create standard logging payload for create file call

* fix: fix linting error
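
As background for the first bullet, here is a minimal sketch of the idea behind encoding a file type in a unified file id so it can be recovered later without another provider lookup. The helper names, the "litellm_proxy:" prefix, and the base64 layout below are illustrative assumptions, not the actual managed_files.py implementation.

# Illustrative sketch only -- not the actual managed_files.py code.
# Pack the file type into the opaque id returned to the client, then
# recover it later (the concern of the common_utils.py fix above).
import base64

def encode_unified_file_id(provider_file_id: str, file_type: str) -> str:
    # Hypothetical scheme: "<type>;<provider id>", base64-encoded behind a prefix.
    raw = f"{file_type};{provider_file_id}".encode()
    return "litellm_proxy:" + base64.urlsafe_b64encode(raw).decode()

def extract_file_type(unified_file_id: str) -> str:
    raw = base64.urlsafe_b64decode(unified_file_id.removeprefix("litellm_proxy:"))
    file_type, _, _ = raw.decode().partition(";")
    return file_type

unified_id = encode_unified_file_id("files/abc-123", "application/json")
assert extract_file_type(unified_id) == "application/json"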
Krish Dholakia, 2025-04-11 09:29:42 -07:00 (committed by GitHub)
parent 8ecd9ede81
commit 0415f1205e
10 changed files with 99 additions and 22 deletions


@@ -34,7 +34,11 @@ from litellm.proxy.common_utils.openai_endpoint_utils import (
 from litellm.proxy.hooks.managed_files import _PROXY_LiteLLMManagedFiles
 from litellm.proxy.utils import ProxyLogging
 from litellm.router import Router
-from litellm.types.llms.openai import OpenAIFileObject, OpenAIFilesPurpose
+from litellm.types.llms.openai import (
+    CREATE_FILE_REQUESTS_PURPOSE,
+    OpenAIFileObject,
+    OpenAIFilesPurpose,
+)
 
 router = APIRouter()
@@ -147,6 +151,7 @@ async def create_file_for_each_model(
         responses.append(individual_response)
     response = await _PROXY_LiteLLMManagedFiles.return_unified_file_id(
         file_objects=responses,
+        create_file_request=_create_file_request,
         purpose=purpose,
         internal_usage_cache=proxy_logging_obj.internal_usage_cache,
         litellm_parent_otel_span=user_api_key_dict.parent_otel_span,
@@ -232,7 +237,7 @@ async def create_file(
         # Cast purpose to OpenAIFilesPurpose type
         purpose = cast(OpenAIFilesPurpose, purpose)
 
-        data = {"purpose": purpose}
+        data = {}
 
         # Include original request and headers in the data
         data = await add_litellm_data_to_request(
@@ -258,7 +263,9 @@ async def create_file(
             model=router_model, llm_router=llm_router
         )
 
-        _create_file_request = CreateFileRequest(file=file_data, **data)
+        _create_file_request = CreateFileRequest(
+            file=file_data, purpose=cast(CREATE_FILE_REQUESTS_PURPOSE, purpose), **data
+        )
 
         response: Optional[OpenAIFileObject] = None
         if (
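
For context on the hunk above: the /files endpoint still takes the purpose from the client and now narrows it to CREATE_FILE_REQUESTS_PURPOSE before building the CreateFileRequest. A rough usage sketch against a locally running LiteLLM proxy follows; the base_url, api_key, and file name are placeholders, not values from this commit.

# Rough usage sketch: upload a file through the proxy endpoint this diff touches.
from openai import OpenAI

client = OpenAI(base_url="http://localhost:4000/v1", api_key="sk-1234")

with open("batch_input.jsonl", "rb") as f:
    file_object = client.files.create(file=f, purpose="batch")

# With LiteLLM managed files enabled, the returned id is expected to be the
# unified file id produced by return_unified_file_id.
print(file_object.id)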