mirror of
https://github.com/BerriAI/litellm.git
synced 2025-04-26 11:14:04 +00:00
[Fix Azure AI Studio] drop_params_from_unprocessable_entity_error (#5936)
* fix drop_params_from_unprocessable_entity_error * fix drop_params_from_unprocessable_entity_error for async azure ai requests * fix extra body reading azure ai studio
This commit is contained in:
parent
9ec3365ba6
commit
93cf9abb88
2 changed files with 50 additions and 42 deletions
45
litellm/llms/OpenAI/common_utils.py
Normal file
45
litellm/llms/OpenAI/common_utils.py
Normal file
|
@ -0,0 +1,45 @@
|
||||||
|
"""
|
||||||
|
Common helpers / utils across all OpenAI endpoints
|
||||||
|
"""
|
||||||
|
|
||||||
|
import json
|
||||||
|
from typing import Any, Dict, List
|
||||||
|
|
||||||
|
import openai
|
||||||
|
|
||||||
|
|
||||||
|
####### Error Handling Utils for OpenAI API #######################
|
||||||
|
###################################################################
|
||||||
|
def drop_params_from_unprocessable_entity_error(
    e: openai.UnprocessableEntityError, data: Dict[str, Any]
) -> Dict[str, Any]:
    """
    Read an OpenAI ``UnprocessableEntityError`` and return a copy of ``data``
    with the parameters that triggered the 422 error removed.

    The validation payload can arrive in two shapes:

    * ``{"detail": [...]}`` — the detail list sits at the top level of the
      error body (this is what the pre-refactor inline handlers consumed).
    * ``{"message": "<json string>"}`` — Azure AI Studio wraps the detail
      payload inside a JSON-encoded ``message`` string.

    Both shapes are handled; each entry's ``loc`` is expected to be a
    two-element list whose second item names the offending parameter.

    Args:
        e (UnprocessableEntityError): The exception raised by the SDK.
        data (Dict[str, Any]): The original request parameters.

    Returns:
        Dict[str, Any]: A new dictionary with the invalid parameters removed.
    """
    invalid_params: List[str] = []
    if e.body is not None and isinstance(e.body, dict):
        message = e.body.get("message", {})
        if isinstance(message, str):
            try:
                message = json.loads(message)
            except json.JSONDecodeError:
                # Not JSON — treat the raw string as the detail payload.
                message = {"detail": message}
        detail = message.get("detail") if isinstance(message, dict) else None
        if detail is None:
            # Fallback: some providers put 'detail' at the top level of the
            # body instead of wrapping it in a 'message' string.
            detail = e.body.get("detail")
        if isinstance(detail, list) and len(detail) > 0 and isinstance(detail[0], dict):
            for error_dict in detail:
                loc = error_dict.get("loc")
                if loc and isinstance(loc, list) and len(loc) == 2:
                    invalid_params.append(loc[1])
    return {k: v for k, v in data.items() if k not in invalid_params}
|
|
@ -31,6 +31,7 @@ from litellm.utils import (
|
||||||
from ...types.llms.openai import *
|
from ...types.llms.openai import *
|
||||||
from ..base import BaseLLM
|
from ..base import BaseLLM
|
||||||
from ..prompt_templates.factory import custom_prompt, prompt_factory
|
from ..prompt_templates.factory import custom_prompt, prompt_factory
|
||||||
|
from .common_utils import drop_params_from_unprocessable_entity_error
|
||||||
|
|
||||||
|
|
||||||
class OpenAIError(Exception):
|
class OpenAIError(Exception):
|
||||||
|
@ -831,27 +832,9 @@ class OpenAIChatCompletion(BaseLLM):
|
||||||
except openai.UnprocessableEntityError as e:
|
except openai.UnprocessableEntityError as e:
|
||||||
## check if body contains unprocessable params - related issue https://github.com/BerriAI/litellm/issues/4800
|
## check if body contains unprocessable params - related issue https://github.com/BerriAI/litellm/issues/4800
|
||||||
if litellm.drop_params is True or drop_params is True:
|
if litellm.drop_params is True or drop_params is True:
|
||||||
invalid_params: List[str] = []
|
optional_params = drop_params_from_unprocessable_entity_error(
|
||||||
if e.body is not None and isinstance(e.body, dict) and e.body.get("detail"): # type: ignore
|
e, optional_params
|
||||||
detail = e.body.get("detail") # type: ignore
|
)
|
||||||
if (
|
|
||||||
isinstance(detail, List)
|
|
||||||
and len(detail) > 0
|
|
||||||
and isinstance(detail[0], dict)
|
|
||||||
):
|
|
||||||
for error_dict in detail:
|
|
||||||
if (
|
|
||||||
error_dict.get("loc")
|
|
||||||
and isinstance(error_dict.get("loc"), list)
|
|
||||||
and len(error_dict.get("loc")) == 2
|
|
||||||
):
|
|
||||||
invalid_params.append(error_dict["loc"][1])
|
|
||||||
|
|
||||||
new_data = {}
|
|
||||||
for k, v in optional_params.items():
|
|
||||||
if k not in invalid_params:
|
|
||||||
new_data[k] = v
|
|
||||||
optional_params = new_data
|
|
||||||
else:
|
else:
|
||||||
raise e
|
raise e
|
||||||
# e.message
|
# e.message
|
||||||
|
@ -967,27 +950,7 @@ class OpenAIChatCompletion(BaseLLM):
|
||||||
except openai.UnprocessableEntityError as e:
|
except openai.UnprocessableEntityError as e:
|
||||||
## check if body contains unprocessable params - related issue https://github.com/BerriAI/litellm/issues/4800
|
## check if body contains unprocessable params - related issue https://github.com/BerriAI/litellm/issues/4800
|
||||||
if litellm.drop_params is True or drop_params is True:
|
if litellm.drop_params is True or drop_params is True:
|
||||||
invalid_params: List[str] = []
|
data = drop_params_from_unprocessable_entity_error(e, data)
|
||||||
if e.body is not None and isinstance(e.body, dict) and e.body.get("detail"): # type: ignore
|
|
||||||
detail = e.body.get("detail") # type: ignore
|
|
||||||
if (
|
|
||||||
isinstance(detail, List)
|
|
||||||
and len(detail) > 0
|
|
||||||
and isinstance(detail[0], dict)
|
|
||||||
):
|
|
||||||
for error_dict in detail:
|
|
||||||
if (
|
|
||||||
error_dict.get("loc")
|
|
||||||
and isinstance(error_dict.get("loc"), list)
|
|
||||||
and len(error_dict.get("loc")) == 2
|
|
||||||
):
|
|
||||||
invalid_params.append(error_dict["loc"][1])
|
|
||||||
|
|
||||||
new_data = {}
|
|
||||||
for k, v in data.items():
|
|
||||||
if k not in invalid_params:
|
|
||||||
new_data[k] = v
|
|
||||||
data = new_data
|
|
||||||
else:
|
else:
|
||||||
raise e
|
raise e
|
||||||
# e.message
|
# e.message
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue