forked from phoenix/litellm-mirror
fix(langfuse.py): don't overwrite trace details if existing trace id passed in
This commit is contained in:
parent
fc5a845838
commit
abdae87ba2
2 changed files with 217 additions and 12 deletions
|
@ -79,7 +79,7 @@ class LangFuseLogger:
|
|||
print_verbose,
|
||||
level="DEFAULT",
|
||||
status_message=None,
|
||||
):
|
||||
) -> dict:
|
||||
# Method definition
|
||||
|
||||
try:
|
||||
|
@ -111,6 +111,7 @@ class LangFuseLogger:
|
|||
pass
|
||||
|
||||
# end of processing langfuse ########################
|
||||
print(f"response obj type: {type(response_obj)}")
|
||||
if (
|
||||
level == "ERROR"
|
||||
and status_message is not None
|
||||
|
@ -140,8 +141,11 @@ class LangFuseLogger:
|
|||
input = prompt
|
||||
output = response_obj["data"]
|
||||
print_verbose(f"OUTPUT IN LANGFUSE: {output}; original: {response_obj}")
|
||||
trace_id = None
|
||||
generation_id = None
|
||||
if self._is_langfuse_v2():
|
||||
self._log_langfuse_v2(
|
||||
print("INSIDE V2 LANGFUSE")
|
||||
trace_id, generation_id = self._log_langfuse_v2(
|
||||
user_id,
|
||||
metadata,
|
||||
litellm_params,
|
||||
|
@ -171,10 +175,12 @@ class LangFuseLogger:
|
|||
f"Langfuse Layer Logging - final response object: {response_obj}"
|
||||
)
|
||||
verbose_logger.info(f"Langfuse Layer Logging - logging success")
|
||||
|
||||
return {"trace_id": trace_id, "generation_id": generation_id}
|
||||
except:
|
||||
traceback.print_exc()
|
||||
verbose_logger.debug(f"Langfuse Layer Error - {traceback.format_exc()}")
|
||||
pass
|
||||
return {"trace_id": None, "generation_id": None}
|
||||
|
||||
async def _async_log_event(
|
||||
self, kwargs, response_obj, start_time, end_time, user_id, print_verbose
|
||||
|
@ -246,7 +252,7 @@ class LangFuseLogger:
|
|||
response_obj,
|
||||
level,
|
||||
print_verbose,
|
||||
):
|
||||
) -> tuple:
|
||||
import langfuse
|
||||
|
||||
try:
|
||||
|
@ -272,11 +278,14 @@ class LangFuseLogger:
|
|||
## DO NOT SET TRACE_NAME if trace-id set. this can lead to overwriting of past traces.
|
||||
trace_name = f"litellm-{kwargs.get('call_type', 'completion')}"
|
||||
|
||||
if existing_trace_id is not None:
|
||||
trace_params = {"trace_id": existing_trace_id}
|
||||
else: # don't overwrite an existing trace
|
||||
trace_params = {
|
||||
"name": trace_name,
|
||||
"input": input,
|
||||
"user_id": metadata.get("trace_user_id", user_id),
|
||||
"id": trace_id or existing_trace_id,
|
||||
"id": trace_id,
|
||||
"session_id": metadata.get("session_id", None),
|
||||
}
|
||||
|
||||
|
@ -414,6 +423,10 @@ class LangFuseLogger:
|
|||
|
||||
print_verbose(f"generation_params: {generation_params}")
|
||||
|
||||
trace.generation(**generation_params)
|
||||
generation_client = trace.generation(**generation_params)
|
||||
|
||||
print(f"LANGFUSE TRACE ID - {generation_client.trace_id}")
|
||||
return generation_client.trace_id, generation_id
|
||||
except Exception as e:
|
||||
verbose_logger.debug(f"Langfuse Layer Error - {traceback.format_exc()}")
|
||||
return None, None
|
||||
|
|
|
@ -348,6 +348,198 @@ def test_langfuse_logging_function_calling():
|
|||
# test_langfuse_logging_function_calling()
|
||||
|
||||
|
||||
def test_langfuse_existing_trace_id():
    """
    When an existing trace id is passed (via ``metadata["existing_trace_id"]``),
    trace params must not be set again -> prevents overwriting the trace.

    Steps:
    1. Log one event, creating a new trace on Langfuse.
    2. Log a second event that carries the first trace's id in metadata.
    3. Fetch the trace before and after the second event and assert it
       is unchanged.
    """
    # Test - if the logs were sent to the correct team on langfuse
    import datetime
    import litellm
    from litellm.integrations.langfuse import LangFuseLogger

    langfuse_Logger = LangFuseLogger(
        langfuse_public_key=os.getenv("LANGFUSE_PROJECT2_PUBLIC"),
        langfuse_secret=os.getenv("LANGFUSE_PROJECT2_SECRET"),
    )
    litellm.success_callback = ["langfuse"]

    # Canned completion response shared by both log_event calls.
    response_obj = litellm.ModelResponse(
        id="chatcmpl-9K5HUAbVRqFrMZKXL0WoC295xhguY",
        choices=[
            litellm.Choices(
                finish_reason="stop",
                index=0,
                message=litellm.Message(
                    content="I'm sorry, I am an AI assistant and do not have real-time information. I recommend checking a reliable weather website or app for the most up-to-date weather information in Boston.",
                    role="assistant",
                ),
            )
        ],
        created=1714573888,
        model="gpt-3.5-turbo-0125",
        object="chat.completion",
        system_fingerprint="fp_3b956da36b",
        usage=litellm.Usage(completion_tokens=37, prompt_tokens=14, total_tokens=51),
    )

    def _make_langfuse_args(metadata, messages):
        # Build the kwargs payload for LangFuseLogger.log_event.
        # Only `metadata` and `messages` differ between the two calls;
        # everything else (timestamps, call ids, costs) is held constant so
        # the second event targets the same logical request.
        return {
            "response_obj": response_obj,
            "kwargs": {
                "model": "gpt-3.5-turbo",
                "litellm_params": {
                    "acompletion": False,
                    "api_key": None,
                    "force_timeout": 600,
                    "logger_fn": None,
                    "verbose": False,
                    "custom_llm_provider": "openai",
                    "api_base": "https://api.openai.com/v1/",
                    "litellm_call_id": "508113a1-c6f1-48ce-a3e1-01c6cce9330e",
                    "model_alias_map": {},
                    "completion_call_id": None,
                    "metadata": metadata,
                    "model_info": None,
                    "proxy_server_request": None,
                    "preset_cache_key": None,
                    "no-log": False,
                    "stream_response": {},
                },
                "messages": messages,
                "optional_params": {"temperature": 0.1, "extra_body": {}},
                "start_time": "2024-05-01 07:31:27.986164",
                "stream": False,
                "user": None,
                "call_type": "completion",
                "litellm_call_id": "508113a1-c6f1-48ce-a3e1-01c6cce9330e",
                "completion_start_time": "2024-05-01 07:31:29.903685",
                "temperature": 0.1,
                "extra_body": {},
                "input": [{"role": "user", "content": "what's the weather in boston"}],
                "api_key": "my-api-key",
                "additional_args": {
                    "complete_input_dict": {
                        "model": "gpt-3.5-turbo",
                        "messages": [
                            {"role": "user", "content": "what's the weather in boston"}
                        ],
                        "temperature": 0.1,
                        "extra_body": {},
                    }
                },
                "log_event_type": "successful_api_call",
                "end_time": "2024-05-01 07:31:29.903685",
                "cache_hit": None,
                "response_cost": 6.25e-05,
            },
            "start_time": datetime.datetime(2024, 5, 1, 7, 31, 27, 986164),
            "end_time": datetime.datetime(2024, 5, 1, 7, 31, 29, 903685),
            "user_id": None,
            "print_verbose": litellm.print_verbose,
            "level": "DEFAULT",
            "status_message": None,
        }

    ### NEW TRACE ###
    message = [{"role": "user", "content": "what's the weather in boston"}]
    langfuse_response_object = langfuse_Logger.log_event(
        **_make_langfuse_args(metadata=None, messages=message)
    )

    import langfuse

    langfuse_client = langfuse.Langfuse(
        public_key=os.getenv("LANGFUSE_PROJECT2_PUBLIC"),
        secret_key=os.getenv("LANGFUSE_PROJECT2_SECRET"),
    )

    trace_id = langfuse_response_object["trace_id"]

    langfuse_client.flush()

    # Give the Langfuse backend time to ingest the event before reading back.
    time.sleep(2)

    print(langfuse_client.get_trace(id=trace_id))

    initial_langfuse_trace = langfuse_client.get_trace(id=trace_id)

    ### EXISTING TRACE ###
    new_metadata = {"existing_trace_id": trace_id}
    new_messages = [{"role": "user", "content": "What do you know?"}]
    langfuse_response_object = langfuse_Logger.log_event(
        **_make_langfuse_args(metadata=new_metadata, messages=new_messages)
    )

    langfuse_client.flush()

    time.sleep(2)

    print(langfuse_client.get_trace(id=trace_id))

    new_langfuse_trace = langfuse_client.get_trace(id=trace_id)

    # The second log_event carried existing_trace_id, so the trace on
    # Langfuse must be byte-for-byte identical to the first read.
    assert dict(initial_langfuse_trace) == dict(new_langfuse_trace)
|
||||
|
||||
|
||||
def test_langfuse_logging_tool_calling():
|
||||
litellm.set_verbose = True
|
||||
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue