mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-25 10:44:24 +00:00
fix(proxy/utils.py): fix failure logging for rejected requests. + unit tests
parent 06efe28132
commit ec03e675c9
5 changed files with 113 additions and 18 deletions
litellm/proxy/utils.py

@@ -593,12 +593,41 @@ class ProxyLogging:
            )
            if litellm_logging_obj is not None:
                ## UPDATE LOGGING INPUT
                _optional_params = {}
                for k, v in request_data.items():
                    if k != "model" and k != "user" and k != "litellm_params":
                        _optional_params[k] = v
                litellm_logging_obj.update_environment_variables(
                    model=request_data.get("model", ""),
                    user=request_data.get("user", ""),
                    optional_params=_optional_params,
                    litellm_params=request_data.get("litellm_params", {}),
                )

                input: Union[list, str, dict] = ""
                if "messages" in request_data and isinstance(
                    request_data["messages"], list
                ):
                    input = request_data["messages"]
                elif "prompt" in request_data and isinstance(
                    request_data["prompt"], str
                ):
                    input = request_data["prompt"]
                elif "input" in request_data and isinstance(
                    request_data["input"], list
                ):
                    input = request_data["input"]

                litellm_logging_obj.pre_call(
                    input=input,
                    api_key="",
                )

                # log the custom exception
                await litellm_logging_obj.async_failure_handler(
                    exception=original_exception,
                    traceback_exception=traceback.format_exc(),
                    start_time=time.time(),
                    end_time=time.time(),
                )

                threading.Thread(
@@ -606,8 +635,6 @@ class ProxyLogging:
                    args=(
                        original_exception,
                        traceback.format_exc(),
                        time.time(),
                        time.time(),
                    ),
                ).start()
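The tail of the diff dispatches the failure callbacks twice: the async handler is awaited inline, and a threading.Thread(...).start() fires the synchronous handlers with the same exception, traceback, and timestamps. The thread's target= line falls between the two hunks and is not shown, so the sketch below assumes a synchronous failure_handler counterpart; the _StubLogging class is a stand-in for illustration only, not litellm's logging object.

import asyncio
import threading
import time
import traceback


class _StubLogging:
    # Stand-in for the proxy's logging object; it only records which
    # handlers fired so the dual dispatch is visible.
    def __init__(self):
        self.calls = []

    async def async_failure_handler(self, exception, traceback_exception, start_time, end_time):
        self.calls.append("async_failure_handler")

    def failure_handler(self, exception, traceback_exception, start_time, end_time):
        self.calls.append("failure_handler")


async def log_rejected_request(logging_obj, original_exception):
    # Async callbacks are awaited inline ...
    await logging_obj.async_failure_handler(
        exception=original_exception,
        traceback_exception=traceback.format_exc(),
        start_time=time.time(),
        end_time=time.time(),
    )
    # ... while the synchronous callbacks run on a background thread,
    # mirroring the threading.Thread(...).start() call in the diff
    # (the real target is elided between the two hunks).
    worker = threading.Thread(
        target=logging_obj.failure_handler,
        args=(original_exception, traceback.format_exc(), time.time(), time.time()),
    )
    worker.start()
    worker.join()  # joined here only so the example is deterministic


stub = _StubLogging()
asyncio.run(log_rejected_request(stub, ValueError("request rejected by guardrail")))
assert stub.calls == ["async_failure_handler", "failure_handler"]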