fix(traceloop.py): add additional openllmetry traces

Krrish Dholakia 2023-12-16 19:21:39 -08:00
parent 7c2fad2d57
commit 7847ae1e23
2 changed files with 36 additions and 9 deletions


@@ -11,13 +11,14 @@ class TraceloopLogger:
         try:
             tracer = self.tracer_wrapper.get_tracer()
             model = kwargs.get("model")
             # LiteLLM uses the standard OpenAI library, so it's already handled by Traceloop SDK
-            if "gpt" in model:
+            if kwargs.get("litellm_params").get("custom_llm_provider") == "openai":
                 return
+            optional_params = kwargs.get("optional_params", {})
             with tracer.start_as_current_span(
                 "litellm.completion",
                 kind=SpanKind.CLIENT,
@@ -26,12 +27,38 @@ class TraceloopLogger:
                 span.set_attribute(
                     SpanAttributes.LLM_REQUEST_MODEL, kwargs.get("model")
                 )
-                span.set_attribute(
-                    SpanAttributes.LLM_REQUEST_MAX_TOKENS, kwargs.get("max_tokens")
-                )
-                span.set_attribute(
-                    SpanAttributes.LLM_TEMPERATURE, kwargs.get("temperature")
-                )
+                if "stop" in optional_params:
+                    span.set_attribute(
+                        SpanAttributes.LLM_CHAT_STOP_SEQUENCES, optional_params.get("stop")
+                    )
+                if "frequency_penalty" in optional_params:
+                    span.set_attribute(
+                        SpanAttributes.LLM_FREQUENCY_PENALTY, optional_params.get("frequency_penalty")
+                    )
+                if "presence_penalty" in optional_params:
+                    span.set_attribute(
+                        SpanAttributes.LLM_PRESENCE_PENALTY, optional_params.get("presence_penalty")
+                    )
+                if "top_p" in optional_params:
+                    span.set_attribute(
+                        SpanAttributes.LLM_TOP_P, optional_params.get("top_p")
+                    )
+                if "tools" in optional_params or "functions" in optional_params:
+                    span.set_attribute(
+                        SpanAttributes.LLM_REQUEST_FUNCTIONS, optional_params.get("tools", optional_params.get("functions"))
+                    )
+                if "user" in optional_params:
+                    span.set_attribute(
+                        SpanAttributes.LLM_USER, optional_params.get("user")
+                    )
+                if "max_tokens" in optional_params:
+                    span.set_attribute(
+                        SpanAttributes.LLM_REQUEST_MAX_TOKENS, kwargs.get("max_tokens")
+                    )
+                if "temperature" in optional_params:
+                    span.set_attribute(
+                        SpanAttributes.LLM_TEMPERATURE, kwargs.get("temperature")
+                    )
                 for idx, prompt in enumerate(kwargs.get("messages")):
                     span.set_attribute(
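
A minimal usage sketch of how the extra attributes could be exercised; this is not part of the commit. It assumes the Traceloop SDK is initialized and litellm's "traceloop" success callback is enabled, and it uses a placeholder non-OpenAI model so the logger does not return early and the optional params end up on the span.

    # Hedged sketch, not part of the commit: exercise the new span attributes.
    import litellm
    from traceloop.sdk import Traceloop

    # Assumed setup: initialize OpenLLMetry and route litellm success events to it.
    Traceloop.init(app_name="litellm-tracing-demo", disable_batch=True)
    litellm.success_callback = ["traceloop"]

    # Non-OpenAI provider, so custom_llm_provider != "openai" and the span is emitted.
    # The optional params below should land on the span as LLM_TOP_P, LLM_USER,
    # LLM_CHAT_STOP_SEQUENCES, LLM_REQUEST_MAX_TOKENS and LLM_TEMPERATURE.
    response = litellm.completion(
        model="claude-instant-1",  # placeholder non-OpenAI model
        messages=[{"role": "user", "content": "Hey, how's it going?"}],
        max_tokens=50,
        temperature=0.2,
        top_p=0.9,
        stop=["\n\n"],
        user="demo-user",
    )
    print(response.choices[0].message.content)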


@@ -222,7 +222,7 @@ class Delta(OpenAIObject):
 class Choices(OpenAIObject):
     def __init__(self, finish_reason=None, index=0, message=None, **params):
         super(Choices, self).__init__(**params)
-        self.finish_reason = map_finish_reason(finish_reason) # set finish_reason for all responses
+        self.finish_reason = map_finish_reason(finish_reason) or "stop" # set finish_reason for all responses
         self.index = index
         if message is None:
             self.message = Message(content=None)
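
A small sketch of the fallback semantics in the second change; the helper below is a hypothetical stand-in, not litellm's actual map_finish_reason. The "or \"stop\"" means any falsy mapping result (for example None from a provider that reports no finish reason) is replaced by "stop", so Choices.finish_reason always holds a usable value.

    # Hypothetical stand-in for litellm's map_finish_reason, just to show the fallback.
    def map_finish_reason_demo(finish_reason):
        mapping = {"stop_sequence": "stop", "max_tokens": "length"}
        return mapping.get(finish_reason, finish_reason)

    print(map_finish_reason_demo("max_tokens") or "stop")     # -> "length"
    print(map_finish_reason_demo("stop_sequence") or "stop")  # -> "stop"
    print(map_finish_reason_demo(None) or "stop")             # -> "stop" (the new default)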