Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-26 11:14:04 +00:00
Merge pull request #1898 from BerriAI/litellm_langfuse_error_logging
Litellm langfuse error logging - log input
Commit 95bf684a8c
1 changed file with 28 additions and 32 deletions
@@ -266,37 +266,33 @@ class LangFuseLogger:
             trace = self.Langfuse.trace(**trace_params)
 
-            if level == "ERROR":
-                trace.generation(
-                    level="ERROR",  # can be any of DEBUG, DEFAULT, WARNING or ERROR
-                    status_message=output,  # can be any string (e.g. stringified stack trace or error body)
-                )
-                print(f"SUCCESSFULLY LOGGED ERROR")
-            else:
-                # get generation_id
-                generation_id = None
-                if (
-                    response_obj is not None
-                    and response_obj.get("id", None) is not None
-                ):
-                    generation_id = litellm.utils.get_logging_id(
-                        start_time, response_obj
-                    )
-                trace.generation(
-                    name=generation_name,
-                    id=metadata.get("generation_id", generation_id),
-                    startTime=start_time,
-                    endTime=end_time,
-                    model=kwargs["model"],
-                    modelParameters=optional_params,
-                    input=input,
-                    output=output,
-                    usage={
-                        "prompt_tokens": response_obj["usage"]["prompt_tokens"],
-                        "completion_tokens": response_obj["usage"]["completion_tokens"],
-                        "total_cost": cost if supports_costs else None,
-                    },
-                    metadata=metadata,
-                )
+            generation_id = None
+            usage = None
+            if response_obj is not None and response_obj.get("id", None) is not None:
+                generation_id = litellm.utils.get_logging_id(start_time, response_obj)
+                usage = {
+                    "prompt_tokens": response_obj["usage"]["prompt_tokens"],
+                    "completion_tokens": response_obj["usage"]["completion_tokens"],
+                    "total_cost": cost if supports_costs else None,
+                }
+
+            generation_params = {
+                "name": generation_name,
+                "id": metadata.get("generation_id", generation_id),
+                "startTime": start_time,
+                "endTime": end_time,
+                "model": kwargs["model"],
+                "modelParameters": optional_params,
+                "input": input,
+                "output": output,
+                "usage": usage,
+                "metadata": metadata,
+                "level": level,
+            }
+
+            if output is not None and isinstance(output, str) and level == "ERROR":
+                generation_params["statusMessage"] = output
+
+            trace.generation(**generation_params)
 
         except Exception as e:
             print(f"Langfuse Layer Error - {traceback.format_exc()}")
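The practical effect of this change is that error-level generations sent to Langfuse now carry the same input, output, and usage fields as successful ones, instead of only a bare status message. Below is a minimal usage sketch, not part of this commit: it assumes Langfuse credentials are available in the environment, and the placeholder key values and the deliberately invalid model name are illustrative only. It simply routes litellm's success and failure callbacks through the Langfuse logger patched above.

import os
import litellm

# Illustrative placeholders; real values come from your Langfuse project settings.
os.environ["LANGFUSE_PUBLIC_KEY"] = "pk-lf-..."
os.environ["LANGFUSE_SECRET_KEY"] = "sk-lf-..."

# Send both successful and failed calls to the Langfuse integration.
litellm.success_callback = ["langfuse"]
litellm.failure_callback = ["langfuse"]

try:
    # An invalid model name (hypothetical) is used here just to trigger the error path.
    litellm.completion(
        model="this-model-does-not-exist",
        messages=[{"role": "user", "content": "Hello"}],
    )
except Exception:
    # With this patch, the failed request is logged to Langfuse as a generation
    # with level="ERROR", statusMessage set to the error output, and the original
    # input preserved alongside it.
    pass

Compared with the old branch-per-level structure, building a single generation_params dict means the error path and the success path share one trace.generation call, which is why the input now shows up on failed requests as well.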