From fba7e830563ded1a3f9c136df2fbac3c59dfd7d2 Mon Sep 17 00:00:00 2001 From: Hannes Burrichter Date: Thu, 9 May 2024 17:28:27 +0200 Subject: [PATCH 01/10] Add parent_observation_id parameter to Langfuse integration --- litellm/integrations/langfuse.py | 1 + 1 file changed, 1 insertion(+) diff --git a/litellm/integrations/langfuse.py b/litellm/integrations/langfuse.py index caf5437b24..49c0f6c69b 100644 --- a/litellm/integrations/langfuse.py +++ b/litellm/integrations/langfuse.py @@ -437,6 +437,7 @@ class LangFuseLogger: generation_params = { "name": generation_name, "id": clean_metadata.pop("generation_id", generation_id), + "parent_observation_id": metadata.get("parent_observation_id"), "start_time": start_time, "end_time": end_time, "model": kwargs["model"], From 710fc6cc41d581a8aead5c62a877866273d7b771 Mon Sep 17 00:00:00 2001 From: Hannes Burrichter Date: Fri, 10 May 2024 10:27:58 +0200 Subject: [PATCH 02/10] Update langfuse integration docs with the new `parent_observation_id` parameter --- .../my-website/docs/observability/langfuse_integration.md | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/docs/my-website/docs/observability/langfuse_integration.md b/docs/my-website/docs/observability/langfuse_integration.md index ebf20b6335..9aa9d899c8 100644 --- a/docs/my-website/docs/observability/langfuse_integration.md +++ b/docs/my-website/docs/observability/langfuse_integration.md @@ -122,6 +122,7 @@ response = completion( metadata={ "generation_name": "ishaan-test-generation", # set langfuse Generation Name "generation_id": "gen-id22", # set langfuse Generation ID + "parent_observation_id": "obs-id9", # set langfuse Parent Observation ID "version": "test-generation-version" # set langfuse Generation Version "trace_user_id": "user-id2", # set langfuse Trace User ID "session_id": "session-1", # set langfuse Session ID @@ -169,9 +170,10 @@ The following parameters can be updated on a continuation of a trace by passing #### Generation Specific 
Parameters -* `generation_id` - Identifier for the generation, auto-generated by default -* `generation_name` - Identifier for the generation, auto-generated by default -* `prompt` - Langfuse prompt object used for the generation, defaults to None +* `generation_id` - Identifier for the generation, auto-generated by default +* `generation_name` - Identifier for the generation, auto-generated by default +* `parent_observation_id` - Identifier for the parent observation, defaults to `None` +* `prompt` - Langfuse prompt object used for the generation, defaults to `None` Any other key value pairs passed into the metadata not listed in the above spec for a `litellm` completion will be added as a metadata key value pair for the generation. From 8468dba063e827af14671f9a1f92e0c9a20739c9 Mon Sep 17 00:00:00 2001 From: Hannes Burrichter Date: Sat, 11 May 2024 18:25:53 +0200 Subject: [PATCH 03/10] Set Langfuse output to null for embedding responses --- litellm/integrations/langfuse.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/litellm/integrations/langfuse.py b/litellm/integrations/langfuse.py index 49c0f6c69b..7f23c06800 100644 --- a/litellm/integrations/langfuse.py +++ b/litellm/integrations/langfuse.py @@ -130,7 +130,7 @@ class LangFuseLogger: or isinstance(response_obj, litellm.EmbeddingResponse) ): input = prompt - output = response_obj["data"] + output = None elif response_obj is not None and isinstance( response_obj, litellm.ModelResponse ): From bb6b2ccfdae8a34f6c87d13f0f9eda2b57f1d439 Mon Sep 17 00:00:00 2001 From: Hannes Burrichter Date: Tue, 14 May 2024 13:36:57 +0200 Subject: [PATCH 04/10] Add timestamp_granularities parameter to transcription function --- litellm/main.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/litellm/main.py b/litellm/main.py index 6156d9c398..f4451128c6 100644 --- a/litellm/main.py +++ b/litellm/main.py @@ -3924,6 +3924,7 @@ def transcription( response_format: Optional[ Literal["json", "text", "srt", 
"verbose_json", "vtt"] ] = None, + timestamp_granularities: Optional[List[Literal["sentence", "word"]]] = None, temperature: Optional[int] = None, # openai defaults this to 0 ## LITELLM PARAMS ## user: Optional[str] = None, @@ -3958,6 +3959,7 @@ def transcription( "language": language, "prompt": prompt, "response_format": response_format, + "timestamp_granularities": timestamp_granularities, "temperature": None, # openai defaults this to 0 } From 153b343d3aec824a7cbd5cf8bc9cd9134dc51ad9 Mon Sep 17 00:00:00 2001 From: Hannes Burrichter Date: Tue, 14 May 2024 13:38:53 +0200 Subject: [PATCH 05/10] Add missing api key to OpenAI transcription --- litellm/main.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/litellm/main.py b/litellm/main.py index f4451128c6..c3b7e53c03 100644 --- a/litellm/main.py +++ b/litellm/main.py @@ -3997,6 +3997,13 @@ def transcription( max_retries=max_retries, ) elif custom_llm_provider == "openai": + api_key = ( + api_key + or litellm.api_key + or litellm.azure_key + or get_secret("OPENAI_API_KEY") + ) + response = openai_chat_completions.audio_transcriptions( model=model, audio_file=file, @@ -4005,6 +4012,7 @@ def transcription( atranscription=atranscription, timeout=timeout, logging_obj=litellm_logging_obj, + api_key=api_key, max_retries=max_retries, ) return response From a5f0b9da4260696a2dbe47fc946ffaec27a05a73 Mon Sep 17 00:00:00 2001 From: Hannes Burrichter Date: Tue, 14 May 2024 13:41:37 +0200 Subject: [PATCH 06/10] Update timestamp_granularities parameter in transcription function --- litellm/main.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/litellm/main.py b/litellm/main.py index c3b7e53c03..58b5551a90 100644 --- a/litellm/main.py +++ b/litellm/main.py @@ -3924,7 +3924,7 @@ def transcription( response_format: Optional[ Literal["json", "text", "srt", "verbose_json", "vtt"] ] = None, - timestamp_granularities: Optional[List[Literal["sentence", "word"]]] = None, + timestamp_granularities: 
Optional[List[Literal['word', 'segment']]] = None, temperature: Optional[int] = None, # openai defaults this to 0 ## LITELLM PARAMS ## user: Optional[str] = None, From e0f1b7b2cf5f9a11a9d0e7246aaf005b0dfd980e Mon Sep 17 00:00:00 2001 From: Hannes Burrichter Date: Tue, 14 May 2024 13:41:37 +0200 Subject: [PATCH 07/10] Update timestamp_granularities parameter in transcription function --- litellm/main.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/litellm/main.py b/litellm/main.py index 58b5551a90..060d0f9ce1 100644 --- a/litellm/main.py +++ b/litellm/main.py @@ -3924,7 +3924,6 @@ def transcription( response_format: Optional[ Literal["json", "text", "srt", "verbose_json", "vtt"] ] = None, - timestamp_granularities: Optional[List[Literal['word', 'segment']]] = None, temperature: Optional[int] = None, # openai defaults this to 0 ## LITELLM PARAMS ## user: Optional[str] = None, @@ -3959,7 +3958,6 @@ def transcription( "language": language, "prompt": prompt, "response_format": response_format, - "timestamp_granularities": timestamp_granularities, "temperature": None, # openai defaults this to 0 } From b8b22ef2d1a76ed9a2840ae29473175cf13c508a Mon Sep 17 00:00:00 2001 From: Hannes Burrichter Date: Tue, 14 May 2024 15:48:41 +0200 Subject: [PATCH 08/10] revert transcription api key --- litellm/main.py | 8 -------- 1 file changed, 8 deletions(-) diff --git a/litellm/main.py b/litellm/main.py index 060d0f9ce1..6156d9c398 100644 --- a/litellm/main.py +++ b/litellm/main.py @@ -3995,13 +3995,6 @@ def transcription( max_retries=max_retries, ) elif custom_llm_provider == "openai": - api_key = ( - api_key - or litellm.api_key - or litellm.azure_key - or get_secret("OPENAI_API_KEY") - ) - response = openai_chat_completions.audio_transcriptions( model=model, audio_file=file, @@ -4010,7 +4003,6 @@ def transcription( atranscription=atranscription, timeout=timeout, logging_obj=litellm_logging_obj, - api_key=api_key, max_retries=max_retries, ) return response From 
20c3351916019e59da40621c280ffca927638872 Mon Sep 17 00:00:00 2001 From: Hannes Burrichter Date: Tue, 21 May 2024 18:24:18 +0200 Subject: [PATCH 09/10] Add null check to parent_observation_id assignment --- litellm/integrations/langfuse.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/litellm/integrations/langfuse.py b/litellm/integrations/langfuse.py index 153b677e07..7a78befaba 100644 --- a/litellm/integrations/langfuse.py +++ b/litellm/integrations/langfuse.py @@ -470,7 +470,6 @@ class LangFuseLogger: generation_params = { "name": generation_name, "id": clean_metadata.pop("generation_id", generation_id), - "parent_observation_id": metadata.get("parent_observation_id"), "start_time": start_time, "end_time": end_time, "model": kwargs["model"], @@ -483,6 +482,10 @@ class LangFuseLogger: "version": clean_metadata.pop("version", None), } + parent_observation_id = metadata.get("parent_observation_id", None) + if parent_observation_id is not None: + generation_params["parent_observation_id"] = parent_observation_id + if supports_prompt: user_prompt = clean_metadata.pop("prompt", None) if user_prompt is None: From 0c0d418d71083be765bfd7ca5c6c384f3135a990 Mon Sep 17 00:00:00 2001 From: Hannes Burrichter Date: Tue, 21 May 2024 18:25:24 +0200 Subject: [PATCH 10/10] Revert set Langfuse output to null for embedding responses --- litellm/integrations/langfuse.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/litellm/integrations/langfuse.py b/litellm/integrations/langfuse.py index 7a78befaba..5e402134d2 100644 --- a/litellm/integrations/langfuse.py +++ b/litellm/integrations/langfuse.py @@ -129,7 +129,7 @@ class LangFuseLogger: or isinstance(response_obj, litellm.EmbeddingResponse) ): input = prompt - output = None + output = response_obj["data"] elif response_obj is not None and isinstance( response_obj, litellm.ModelResponse ):