Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-24 18:24:20 +00:00
(fix) safe access litellm_params, proxy_server_request
parent 16f3d7e0ed
commit 5698be0df1
1 changed file with 2 additions and 2 deletions
@@ -523,8 +523,8 @@ async def track_cost_callback(
         verbose_proxy_logger.debug(
             f"kwargs stream: {kwargs.get('stream', None)} + complete streaming response: {kwargs.get('complete_streaming_response', None)}"
         )
-        litellm_params = kwargs.get("litellm_params", {})
-        proxy_server_request = litellm_params.get("proxy_server_request")
+        litellm_params = kwargs.get("litellm_params", {}) or {}
+        proxy_server_request = litellm_params.get("proxy_server_request") or {}
         user_id = proxy_server_request.get("body", {}).get("user", None)
         if "complete_streaming_response" in kwargs:
             # for tracking streaming cost we pass the "messages" and the output_text to litellm.completion_cost
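The reason for the `or {}` guard: `dict.get("key", {})` only falls back to the default when the key is absent; if the key is present with an explicit `None` value, `None` is returned and the chained `.get()` calls raise `AttributeError`. Below is a minimal sketch of the pattern this commit applies, with an illustrative `extract_user_id` helper and made-up `kwargs` contents, not the actual litellm callback payload.

# Sketch of the None-safety pattern from this commit.
# `extract_user_id` and the sample kwargs are hypothetical.

def extract_user_id(kwargs: dict):
    # "or {}" coerces an explicit None value back to an empty dict,
    # so the chained .get() calls below cannot raise AttributeError.
    litellm_params = kwargs.get("litellm_params", {}) or {}
    proxy_server_request = litellm_params.get("proxy_server_request") or {}
    return proxy_server_request.get("body", {}).get("user", None)


# Before the fix, the first call would raise AttributeError
# ("'NoneType' object has no attribute 'get'") because the key is
# present but its value is None.
print(extract_user_id({"litellm_params": None}))  # -> None
print(extract_user_id({}))                        # -> None
print(extract_user_id(
    {"litellm_params": {"proxy_server_request": {"body": {"user": "u1"}}}}
))                                                # -> u1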