fix(slack_alerting.py): show langfuse traces on error messages

Krrish Dholakia 2024-05-17 18:35:03 -07:00
parent 29228c202a
commit 1ccc604953
4 changed files with 59 additions and 19 deletions


@@ -93,6 +93,7 @@ class LangFuseLogger:
             )
             litellm_params = kwargs.get("litellm_params", {})
+            litellm_call_id = kwargs.get("litellm_call_id", None)
             metadata = (
                 litellm_params.get("metadata", {}) or {}
             )  # if litellm_params['metadata'] == None
@@ -161,6 +162,7 @@ class LangFuseLogger:
                     response_obj,
                     level,
                     print_verbose,
+                    litellm_call_id,
                 )
             elif response_obj is not None:
                 self._log_langfuse_v1(
@@ -255,6 +257,7 @@ class LangFuseLogger:
         response_obj,
         level,
         print_verbose,
+        litellm_call_id,
     ) -> tuple:
         import langfuse
@@ -318,7 +321,7 @@ class LangFuseLogger:
         session_id = clean_metadata.pop("session_id", None)
         trace_name = clean_metadata.pop("trace_name", None)
-        trace_id = clean_metadata.pop("trace_id", None)
+        trace_id = clean_metadata.pop("trace_id", litellm_call_id)
         existing_trace_id = clean_metadata.pop("existing_trace_id", None)
         update_trace_keys = clean_metadata.pop("update_trace_keys", [])
         debug = clean_metadata.pop("debug_langfuse", None)
@@ -351,9 +354,13 @@ class LangFuseLogger:
                 # Special keys that are found in the function arguments and not the metadata
                 if "input" in update_trace_keys:
-                    trace_params["input"] = input if not mask_input else "redacted-by-litellm"
+                    trace_params["input"] = (
+                        input if not mask_input else "redacted-by-litellm"
+                    )
                 if "output" in update_trace_keys:
-                    trace_params["output"] = output if not mask_output else "redacted-by-litellm"
+                    trace_params["output"] = (
+                        output if not mask_output else "redacted-by-litellm"
+                    )
             else:  # don't overwrite an existing trace
                 trace_params = {
                     "id": trace_id,
@@ -375,7 +382,9 @@ class LangFuseLogger:
             if level == "ERROR":
                 trace_params["status_message"] = output
             else:
-                trace_params["output"] = output if not mask_output else "redacted-by-litellm"
+                trace_params["output"] = (
+                    output if not mask_output else "redacted-by-litellm"
+                )
            if debug == True or (isinstance(debug, str) and debug.lower() == "true"):
                if "metadata" in trace_params:

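The LangFuseLogger hunks above thread litellm_call_id through the logger and use it as the default trace_id, so every request ends up with a Langfuse trace id even when the caller never set one in metadata. A minimal sketch of the resulting URL construction, assuming a hypothetical helper name and an illustrative base_url (neither is part of this commit):

# Sketch only: mirrors the fallback in the diff above, where an explicit
# metadata trace_id wins and litellm_call_id is the default otherwise.
from typing import Optional


def build_trace_url(
    base_url: str, metadata: dict, litellm_call_id: Optional[str]
) -> Optional[str]:
    trace_id = metadata.get("trace_id") or litellm_call_id  # fallback order
    if trace_id is None:
        return None
    return f"{base_url}/trace/{trace_id}"


# Example: no trace_id in metadata, so the call id is used.
print(build_trace_url("https://cloud.langfuse.com", {}, "example-call-id"))
# https://cloud.langfuse.com/trace/example-call-id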

@@ -164,13 +164,28 @@ class SlackAlerting(CustomLogger):
     ) -> Optional[str]:
         """
         Returns langfuse trace url
+
+        - check:
+        -> existing_trace_id
+        -> trace_id
+        -> litellm_call_id
         """
         # do nothing for now
-        if (
-            request_data is not None
-            and request_data.get("metadata", {}).get("trace_id", None) is not None
-        ):
-            trace_id = request_data["metadata"]["trace_id"]
+        if request_data is not None:
+            trace_id = None
+            if (
+                request_data.get("metadata", {}).get("existing_trace_id", None)
+                is not None
+            ):
+                trace_id = request_data["metadata"]["existing_trace_id"]
+            elif request_data.get("metadata", {}).get("trace_id", None) is not None:
+                trace_id = request_data["metadata"]["trace_id"]
+            elif request_data.get("litellm_logging_obj", None) is not None and hasattr(
+                request_data["litellm_logging_obj"], "model_call_details"
+            ):
+                trace_id = request_data["litellm_logging_obj"].model_call_details[
+                    "litellm_call_id"
+                ]
             if litellm.utils.langFuseLogger is not None:
                 base_url = litellm.utils.langFuseLogger.Langfuse.base_url
                 return f"{base_url}/trace/{trace_id}"

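In the SlackAlerting hunk above, the trace id for the alert link is now resolved in order: metadata.existing_trace_id, then metadata.trace_id, then the litellm_call_id stored on the request's logging object. The same precedence as a standalone, hedged sketch (the helper name and the fake logging object are illustrative, not litellm APIs):

from typing import Any, Dict, Optional


def resolve_trace_id(request_data: Dict[str, Any]) -> Optional[str]:
    # Order matches the diff: existing_trace_id -> trace_id -> litellm_call_id.
    metadata = request_data.get("metadata", {}) or {}
    if metadata.get("existing_trace_id") is not None:
        return metadata["existing_trace_id"]
    if metadata.get("trace_id") is not None:
        return metadata["trace_id"]
    logging_obj = request_data.get("litellm_logging_obj")
    if logging_obj is not None and hasattr(logging_obj, "model_call_details"):
        return logging_obj.model_call_details.get("litellm_call_id")
    return None


class _FakeLoggingObj:
    model_call_details = {"litellm_call_id": "example-call-id"}


# Only the logging object is present, so the call id becomes the trace id.
print(resolve_trace_id({"litellm_logging_obj": _FakeLoggingObj()}))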

@@ -1,4 +1,9 @@
 model_list:
+  - model_name: gpt-3.5-turbo-fake-model
+    litellm_params:
+      model: openai/my-fake-model
+      api_base: http://0.0.0.0:8080
+      api_key: ""
   - model_name: gpt-3.5-turbo
     litellm_params:
       model: azure/gpt-35-turbo
@@ -14,5 +19,10 @@ model_list:
 router_settings:
   enable_pre_call_checks: true
-general_settings:
-  master_key: sk-1234 # [OPTIONAL] Use to enforce auth on proxy. See - https://docs.litellm.ai/docs/proxy/virtual_keys
+litellm_settings:
+  success_callback: ["langfuse"]
+  failure_callback: ["langfuse"]
+
+general_settings:
+  alerting: ["slack"]

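The config hunks above add a fake OpenAI-compatible backend (openai/my-fake-model on port 8080), turn on Langfuse success/failure callbacks, and enable Slack alerting, which makes it easy to trigger a failure and check that the alert links to a Langfuse trace. A hedged usage example with the OpenAI SDK, assuming the proxy is running locally on its default port 4000 and that the key below matches your setup:

from openai import OpenAI

client = OpenAI(
    api_key="sk-1234",               # replace with your proxy key, or any string if auth is off
    base_url="http://0.0.0.0:4000",  # litellm proxy address; the port is an assumption
)

# Routed to openai/my-fake-model; if that backend errors, the failure is
# logged to Langfuse and a Slack alert carrying the trace URL should follow.
response = client.chat.completions.create(
    model="gpt-3.5-turbo-fake-model",
    messages=[{"role": "user", "content": "ping"}],
)
print(response)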

@@ -671,15 +671,21 @@ async def user_api_key_auth(
         _end_user_object = None
         end_user_params = {}
         if "user" in request_data:
-            _end_user_object = await get_end_user_object(
-                end_user_id=request_data["user"],
-                prisma_client=prisma_client,
-                user_api_key_cache=user_api_key_cache,
-            )
-            if _end_user_object is not None:
-                end_user_params["allowed_model_region"] = (
-                    _end_user_object.allowed_model_region
-                )
+            try:
+                _end_user_object = await get_end_user_object(
+                    end_user_id=request_data["user"],
+                    prisma_client=prisma_client,
+                    user_api_key_cache=user_api_key_cache,
+                )
+                if _end_user_object is not None:
+                    end_user_params["allowed_model_region"] = (
+                        _end_user_object.allowed_model_region
+                    )
+            except Exception as e:
+                verbose_proxy_logger.debug(
+                    "Unable to find user in db. Error - {}".format(str(e))
+                )
+                pass
 
         try:
             is_master_key_valid = secrets.compare_digest(api_key, master_key)  # type: ignore
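
The user_api_key_auth hunk above wraps the end-user lookup in try/except so a failing DB call is logged at debug level instead of failing the whole auth path. A self-contained sketch of that fail-open pattern with stand-in names (lookup_end_user and the logger are hypothetical, not the proxy's real helpers):

import logging

logger = logging.getLogger("proxy")


def lookup_end_user(user_id: str) -> dict:
    # Stand-in for the DB lookup; simulate it failing.
    raise RuntimeError("db unavailable")


def build_end_user_params(request_data: dict) -> dict:
    end_user_params: dict = {}
    if "user" in request_data:
        try:
            end_user = lookup_end_user(request_data["user"])
            if end_user is not None:
                end_user_params["allowed_model_region"] = end_user.get(
                    "allowed_model_region"
                )
        except Exception as e:
            # Same spirit as the diff: record the failure and keep authenticating.
            logger.debug("Unable to find user in db. Error - {}".format(str(e)))
    return end_user_params


print(build_end_user_params({"user": "u1"}))  # {} -> auth continues without end-user info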