(feat) bump langfuse logger; track function responses and completion() metadata such as temperature, max_tokens, etc.

ishaan-jaff 2023-10-17 11:51:01 -07:00
parent dbd1d702e4
commit 4cccad35f4
2 changed files with 45 additions and 33 deletions
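For context, a minimal sketch of the kind of call this change now mirrors into Langfuse together with its sampling parameters. The `litellm.success_callback = ["langfuse"]` hook and the LANGFUSE_PUBLIC_KEY / LANGFUSE_SECRET_KEY environment variables are assumptions based on how the langfuse tests are normally wired, not something shown in this diff:

# Sketch only: assumes the "langfuse" success callback is available and that
# LANGFUSE_PUBLIC_KEY / LANGFUSE_SECRET_KEY are set in the environment.
import litellm
from litellm import completion

litellm.success_callback = ["langfuse"]

response = completion(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "Hi 👋 - i'm a test"}],
    max_tokens=10,
    temperature=0.2,
    # after this change, the leftover kwargs (temperature, max_tokens, ...)
    # are forwarded to Langfuse as modelParameters on the generation
    metadata={"generation_name": "litellm-completion"},
)
print(response["choices"][0]["message"])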

@@ -37,14 +37,24 @@ class LangFuseLogger:
             # print(response_obj['usage']['prompt_tokens'])
             # print(response_obj['usage']['completion_tokens'])
             metadata = kwargs.get("metadata", {})
+            prompt = [kwargs['messages']]
+            # langfuse does not accept jsons for logging metadata #
+            kwargs.pop("litellm_logging_obj", None)
+            kwargs.pop("messages", None)
+            kwargs.pop("functions", None) # ensure it's a safe pop
+            kwargs.pop("function_call", None) # ensure it's a safe pop
+            kwargs.pop("metadata", None) # ensure it's a safe pop
+            # end of processing langfuse ########################
             self.Langfuse.generation(InitialGeneration(
                 name=metadata.get("generation_name", "litellm-completion"),
                 startTime=start_time,
                 endTime=end_time,
                 model=kwargs['model'],
-                # modelParameters= kwargs,
-                prompt=[kwargs['messages']],
-                completion=response_obj['choices'][0]['message']['content'],
+                modelParameters= kwargs,
+                prompt=prompt,
+                completion=response_obj['choices'][0]['message'],
                 usage=Usage(
                     prompt_tokens=response_obj['usage']['prompt_tokens'],
                     completion_tokens=response_obj['usage']['completion_tokens']
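Pulled out of the diff, the new logging flow reads roughly like the sketch below. This is a standalone approximation, not the actual method: the helper name `log_generation` is hypothetical, while `InitialGeneration` and `Usage` are the langfuse SDK models the logger already uses above:

# Minimal sketch of the cleanup-then-log pattern, assuming the langfuse SDK's
# InitialGeneration / Usage models (imported by this logger at the top of the file).
from langfuse.model import InitialGeneration, Usage

def log_generation(langfuse_client, kwargs, response_obj, start_time, end_time):
    metadata = kwargs.get("metadata", {})
    prompt = [kwargs["messages"]]
    # Drop values Langfuse can't serialize, then forward whatever remains
    # (temperature, max_tokens, ...) as modelParameters.
    for key in ("litellm_logging_obj", "messages", "functions", "function_call", "metadata"):
        kwargs.pop(key, None)
    langfuse_client.generation(InitialGeneration(
        name=metadata.get("generation_name", "litellm-completion"),
        startTime=start_time,
        endTime=end_time,
        model=kwargs["model"],
        modelParameters=kwargs,
        prompt=prompt,
        # log the whole message dict, not just its 'content',
        # so function responses are captured too
        completion=response_obj["choices"][0]["message"],
        usage=Usage(
            prompt_tokens=response_obj["usage"]["prompt_tokens"],
            completion_tokens=response_obj["usage"]["completion_tokens"],
        ),
    ))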

@@ -21,6 +21,7 @@ def test_langfuse_logging():
                                  "content": "Hi 👋 - i'm claude"
                              }],
                              max_tokens=10,
+                             temperature=0.2
                              )
         print(response)
     except Exception as e:
@@ -48,37 +49,38 @@ def test_langfuse_logging_custom_generation_name():
 test_langfuse_logging_custom_generation_name()
-# def test_langfuse_logging_function_calling():
-#     function1 = [
-#         {
-#             "name": "get_current_weather",
-#             "description": "Get the current weather in a given location",
-#             "parameters": {
-#                 "type": "object",
-#                 "properties": {
-#                     "location": {
-#                         "type": "string",
-#                         "description": "The city and state, e.g. San Francisco, CA",
-#                     },
-#                     "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]},
-#                 },
-#                 "required": ["location"],
-#             },
-#         }
-#     ]
-#     try:
-#         response = completion(model="gpt-3.5-turbo",
-#                               messages=[{
-#                                   "role": "user",
-#                                   "content": "what's the weather outside"
-#                               }],
-#                               functions=function1,
-#         )
-#         print(response)
-#     except Exception as e:
-#         print(e)
+def test_langfuse_logging_function_calling():
+    function1 = [
+        {
+            "name": "get_current_weather",
+            "description": "Get the current weather in a given location",
+            "parameters": {
+                "type": "object",
+                "properties": {
+                    "location": {
+                        "type": "string",
+                        "description": "The city and state, e.g. San Francisco, CA",
+                    },
+                    "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]},
+                },
+                "required": ["location"],
+            },
+        }
+    ]
+    try:
+        response = completion(model="gpt-3.5-turbo",
+                              messages=[{
+                                  "role": "user",
+                                  "content": "what's the weather in boston"
+                              }],
+                              temperature=0.1,
+                              functions=function1,
+        )
+        print(response)
+    except Exception as e:
+        print(e)
-# test_langfuse_logging_function_calling()
+test_langfuse_logging_function_calling()
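A short follow-up sketch of why this test was re-enabled: with the full message object logged as the generation's completion, a function-calling response is tracked including its function_call rather than an empty content. The callback setup and the final print are assumptions for illustration; the schema mirrors the test above:

# Sketch only: exercises a function-calling completion so the logged
# completion carries the function_call returned by the model.
import litellm
from litellm import completion

litellm.success_callback = ["langfuse"]

functions = [{
    "name": "get_current_weather",
    "description": "Get the current weather in a given location",
    "parameters": {
        "type": "object",
        "properties": {
            "location": {"type": "string", "description": "The city and state, e.g. San Francisco, CA"},
            "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]},
        },
        "required": ["location"],
    },
}]

response = completion(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "what's the weather in boston"}],
    temperature=0.1,
    functions=functions,
)
# The whole message (including any function_call) is what the logger now
# sends to Langfuse, instead of message['content'] alone.
print(response["choices"][0]["message"])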