diff --git a/litellm/integrations/langfuse.py b/litellm/integrations/langfuse.py
index 046a6bcb2f..60c32e212b 100644
--- a/litellm/integrations/langfuse.py
+++ b/litellm/integrations/langfuse.py
@@ -279,7 +279,7 @@ class LangFuseLogger:
         trace_name = f"litellm-{kwargs.get('call_type', 'completion')}"
 
         if existing_trace_id is not None:
-            trace_params = {"trace_id": existing_trace_id}
+            trace_params = {"id": existing_trace_id}
         else:  # don't overwrite an existing trace
             trace_params = {
                 "name": trace_name,
@@ -289,10 +289,10 @@ class LangFuseLogger:
                 "session_id": metadata.get("session_id", None),
             }
 
-            if level == "ERROR":
-                trace_params["status_message"] = output
-            else:
-                trace_params["output"] = output
+        if level == "ERROR":
+            trace_params["status_message"] = output
+        else:
+            trace_params["output"] = output
 
         cost = kwargs.get("response_cost", None)
         print_verbose(f"trace: {cost}")
@@ -350,7 +350,8 @@ class LangFuseLogger:
             kwargs["cache_hit"] = False
         tags.append(f"cache_hit:{kwargs['cache_hit']}")
         clean_metadata["cache_hit"] = kwargs["cache_hit"]
-        trace_params.update({"tags": tags})
+        if existing_trace_id is None:
+            trace_params.update({"tags": tags})
 
         proxy_server_request = litellm_params.get("proxy_server_request", None)
         if proxy_server_request:
diff --git a/litellm/tests/test_alangfuse.py b/litellm/tests/test_alangfuse.py
index f0fcf2575e..29718d4746 100644
--- a/litellm/tests/test_alangfuse.py
+++ b/litellm/tests/test_alangfuse.py
@@ -470,8 +470,26 @@ def test_langfuse_existing_trace_id():
     new_metadata = {"existing_trace_id": trace_id}
     new_messages = [{"role": "user", "content": "What do you know?"}]
 
+    new_response_obj = litellm.ModelResponse(
+        id="chatcmpl-9K5HUAbVRqFrMZKXL0WoC295xhguY",
+        choices=[
+            litellm.Choices(
+                finish_reason="stop",
+                index=0,
+                message=litellm.Message(
+                    content="What do I know?",
+                    role="assistant",
+                ),
+            )
+        ],
+        created=1714573888,
+        model="gpt-3.5-turbo-0125",
+        object="chat.completion",
+        system_fingerprint="fp_3b956da36b",
+        usage=litellm.Usage(completion_tokens=37, prompt_tokens=14, total_tokens=51),
+    )
     langfuse_args = {
-        "response_obj": response_obj,
+        "response_obj": new_response_obj,
         "kwargs": {
             "model": "gpt-3.5-turbo",
             "litellm_params": {
@@ -529,6 +547,10 @@ def test_langfuse_existing_trace_id():
     langfuse_response_object = langfuse_Logger.log_event(**langfuse_args)
 
+    new_trace_id = langfuse_response_object["trace_id"]
+
+    assert new_trace_id == trace_id
+
     langfuse_client.flush()
 
     time.sleep(2)