mirror of
https://github.com/BerriAI/litellm.git
synced 2025-04-27 11:43:54 +00:00
fix(langfuse.py): fix handling of dict object for langfuse prompt management
This commit is contained in:
parent
ed2a85bb7b
commit
8e43e08a49
1 changed file with 23 additions and 1 deletion
|
@@ -474,7 +474,29 @@ class LangFuseLogger:
|
|||
}
|
||||
|
||||
if supports_prompt:
|
||||
generation_params["prompt"] = clean_metadata.pop("prompt", None)
|
||||
user_prompt = clean_metadata.pop("prompt", None)
|
||||
if user_prompt is None:
|
||||
pass
|
||||
elif isinstance(user_prompt, dict):
|
||||
from langfuse.model import (
|
||||
TextPromptClient,
|
||||
ChatPromptClient,
|
||||
Prompt_Text,
|
||||
Prompt_Chat,
|
||||
)
|
||||
|
||||
if user_prompt.get("type", "") == "chat":
|
||||
_prompt_chat = Prompt_Chat(**user_prompt)
|
||||
generation_params["prompt"] = ChatPromptClient(
|
||||
prompt=_prompt_chat
|
||||
)
|
||||
elif user_prompt.get("type", "") == "text":
|
||||
_prompt_text = Prompt_Text(**user_prompt)
|
||||
generation_params["prompt"] = TextPromptClient(
|
||||
prompt=_prompt_text
|
||||
)
|
||||
else:
|
||||
generation_params["prompt"] = user_prompt
|
||||
|
||||
if output is not None and isinstance(output, str) and level == "ERROR":
|
||||
generation_params["status_message"] = output
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue