mirror of
https://github.com/BerriAI/litellm.git
synced 2025-04-25 10:44:24 +00:00
* fix(http_parsing_utils.py): remove `ast.literal_eval()` from http utils Security fix - https://huntr.com/bounties/96a32812-213c-4819-ba4e-36143d35e95b?token=bf414bbd77f8b346556e 64ab2dd9301ea44339910877ea50401c76f977e36cdd78272f5fb4ca852a88a7e832828aae1192df98680544ee24aa98f3cf6980d8 bab641a66b7ccbc02c0e7d4ddba2db4dbe7318889dc0098d8db2d639f345f574159814627bb084563bad472e2f990f825bff0878a9 e281e72c88b4bc5884d637d186c0d67c9987c57c3f0caf395aff07b89ad2b7220d1dd7d1b427fd2260b5f01090efce5250f8b56ea2 c0ec19916c24b23825d85ce119911275944c840a1340d69e23ca6a462da610 * fix(converse/transformation.py): support bedrock apac cross region inference Fixes https://github.com/BerriAI/litellm/issues/6905 * fix(user_api_key_auth.py): add auth check for websocket endpoint Fixes https://github.com/BerriAI/litellm/issues/6926 * fix(user_api_key_auth.py): use `model` from query param * fix: fix linting error * test: run flaky tests first
115 lines
3.7 KiB
Python
115 lines
3.7 KiB
Python
import ast
|
|
import json
|
|
from typing import Dict, List, Optional
|
|
|
|
from fastapi import Request, UploadFile, status
|
|
|
|
from litellm._logging import verbose_proxy_logger
|
|
from litellm.types.router import Deployment
|
|
|
|
|
|
async def _read_request_body(request: Optional[Request]) -> Dict:
|
|
"""
|
|
Safely read the request body and parse it as JSON.
|
|
|
|
Parameters:
|
|
- request: The request object to read the body from
|
|
|
|
Returns:
|
|
- dict: Parsed request data as a dictionary or an empty dictionary if parsing fails
|
|
"""
|
|
try:
|
|
if request is None:
|
|
return {}
|
|
|
|
# Read the request body
|
|
body = await request.body()
|
|
|
|
# Return empty dict if body is empty or None
|
|
if not body:
|
|
return {}
|
|
|
|
# Decode the body to a string
|
|
body_str = body.decode()
|
|
|
|
# Attempt JSON parsing (safe for untrusted input)
|
|
return json.loads(body_str)
|
|
|
|
except json.JSONDecodeError:
|
|
# Log detailed information for debugging
|
|
verbose_proxy_logger.exception("Invalid JSON payload received.")
|
|
return {}
|
|
|
|
except Exception as e:
|
|
# Catch unexpected errors to avoid crashes
|
|
verbose_proxy_logger.exception(
|
|
"Unexpected error reading request body - {}".format(e)
|
|
)
|
|
return {}
|
|
|
|
|
|
def check_file_size_under_limit(
    request_data: dict,
    file: UploadFile,
    router_model_names: List[str],
) -> bool:
    """
    Check if any files passed in request are under max_file_size_mb

    Parameters:
    - request_data: parsed request body; as a side effect a
      "file_size_in_mb" entry is written into its "metadata" dict
    - file: the uploaded file whose size is checked
    - router_model_names: model group names known to the router

    Returns True -> when file size is under max_file_size_mb limit
    Raises ProxyException -> when file size is over max_file_size_mb limit or not a premium_user
    """
    # Imported lazily to avoid a circular import with proxy_server
    from litellm.proxy.proxy_server import (
        CommonProxyErrors,
        ProxyException,
        llm_router,
        premium_user,
    )

    file_contents_size = file.size or 0
    file_content_size_in_mb = file_contents_size / (1024 * 1024)
    if "metadata" not in request_data:
        request_data["metadata"] = {}
    request_data["metadata"]["file_size_in_mb"] = file_content_size_in_mb
    max_file_size_mb = None

    # Use .get() so a request without a "model" key doesn't raise KeyError;
    # a model-less request simply has no per-deployment size limit to apply.
    request_model = request_data.get("model")
    if llm_router is not None and request_model in router_model_names:
        try:
            deployment: Optional[Deployment] = (
                llm_router.get_deployment_by_model_group_name(
                    model_group_name=request_model
                )
            )
            if (
                deployment
                and deployment.litellm_params is not None
                and deployment.litellm_params.max_file_size_mb is not None
            ):
                max_file_size_mb = deployment.litellm_params.max_file_size_mb
        except Exception as e:
            # Best-effort lookup: a router error must not block the request
            verbose_proxy_logger.error(
                "Got error when checking file size: %s", (str(e))
            )

    if max_file_size_mb is not None:
        verbose_proxy_logger.debug(
            "Checking file size, file content size=%s, max_file_size_mb=%s",
            file_content_size_in_mb,
            max_file_size_mb,
        )
        # Enforcing max_file_size_mb is gated behind the premium tier
        if not premium_user:
            raise ProxyException(
                message=f"Tried setting max_file_size_mb for /audio/transcriptions. {CommonProxyErrors.not_premium_user.value}",
                code=status.HTTP_400_BAD_REQUEST,
                type="bad_request",
                param="file",
            )
        if file_content_size_in_mb > max_file_size_mb:
            raise ProxyException(
                message=f"File size is too large. Please check your file size. Passed file size: {file_content_size_in_mb} MB. Max file size: {max_file_size_mb} MB",
                code=status.HTTP_400_BAD_REQUEST,
                type="bad_request",
                param="file",
            )

    return True
|