fix(langsmith.py): fix langsmith streaming logging

Krrish Dholakia 2023-12-23 10:02:35 +05:30
parent c568bb6cac
commit 43f4096014
2 changed files with 32 additions and 4 deletions


@@ -8,7 +8,7 @@ from litellm import completion
 import litellm
 litellm.success_callback = ["langsmith"]
-# litellm.set_verbose = True
+litellm.set_verbose = True
 import time
@@ -26,7 +26,7 @@ def test_langsmith_logging():
     except Exception as e:
         print(e)
-test_langsmith_logging()
+# test_langsmith_logging()
 def test_langsmith_logging_with_metadata():
@@ -47,4 +47,26 @@ def test_langsmith_logging_with_metadata():
     except Exception as e:
         print(e)
-test_langsmith_logging_with_metadata()
+# test_langsmith_logging_with_metadata()
+def test_langsmith_logging_with_streaming_and_metadata():
+    try:
+        response = completion(model="gpt-3.5-turbo",
+            messages=[{
+                "role": "user",
+                "content": "what llm are u"
+            }],
+            max_tokens=10,
+            temperature=0.2,
+            metadata={
+                "run_name": "litellmRUN",
+                "project_name": "litellm-completion",
+            },
+            stream=True
+        )
+        for chunk in response:
+            continue
+    except Exception as e:
+        print(e)
+test_langsmith_logging_with_streaming_and_metadata()
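
For reference, the behaviour the new test checks can be reproduced outside the test file with a few lines (a minimal sketch, assuming the OpenAI and Langsmith API keys are set in the environment). The important detail is that the stream has to be consumed to the end: the Langsmith callback only logs once the complete streaming response has been assembled from the chunks.

import litellm
from litellm import completion

litellm.success_callback = ["langsmith"]

response = completion(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "what llm are u"}],
    max_tokens=10,
    metadata={"run_name": "litellmRUN", "project_name": "litellm-completion"},
    stream=True,
)

# consume every chunk; the Langsmith log entry is emitted after the final one
for chunk in response:
    pass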


@@ -985,6 +985,12 @@ class Logging:
                 )
             if callback == "langsmith":
                 print_verbose("reaches langsmith for logging!")
+                if self.stream:
+                    if "complete_streaming_response" not in kwargs:
+                        break
+                    else:
+                        print_verbose("reaches langsmith for streaming logging!")
+                        result = kwargs["complete_streaming_response"]
                 langsmithLogger.log_event(
                     kwargs=self.model_call_details,
                     response_obj=result,
@@ -1035,7 +1041,7 @@ class Logging:
                 # this only logs streaming once, complete_streaming_response exists i.e when stream ends
                 if self.stream:
                     if "complete_streaming_response" not in kwargs:
-                        return
+                        break
                     else:
                         print_verbose("reaches langfuse for streaming logging!")
                         result = kwargs["complete_streaming_response"]
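
Stripped of the surrounding handler machinery, the control flow these two hunks introduce looks roughly like the sketch below. Every name in it is hypothetical (handle_success, log_to_langsmith); only the gating logic mirrors the diff: while streaming, the per-chunk success events carry no complete_streaming_response and are skipped, so the Langsmith event is logged exactly once, with the fully assembled response. Note that break only leaves the callback loop for that chunk, whereas the old return in the Langfuse branch exited the enclosing handler entirely.

def handle_success(callbacks, model_call_details, stream, **kwargs):
    # hypothetical stand-in for the success handler patched above
    for callback in callbacks:
        if callback == "langsmith":
            result = kwargs.get("original_response")
            if stream:
                if "complete_streaming_response" not in kwargs:
                    break  # intermediate chunk: nothing to log yet
                result = kwargs["complete_streaming_response"]
            log_to_langsmith(model_call_details, result)


def log_to_langsmith(details, result):
    # hypothetical sink standing in for the real Langsmith logger call
    print("langsmith log:", details.get("model"), "->", result)


# per-chunk event while streaming: skipped
handle_success(["langsmith"], {"model": "gpt-3.5-turbo"}, stream=True)

# end-of-stream event: logged once with the rebuilt response
handle_success(
    ["langsmith"],
    {"model": "gpt-3.5-turbo"},
    stream=True,
    complete_streaming_response={"choices": [{"message": {"content": "hi"}}]},
)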