Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-25 18:54:30 +00:00)
refactor: add black formatting
commit 4905929de3 (parent b87d630b0a)
156 changed files with 19723 additions and 10869 deletions
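The hunks below are representative of the whole pass: calls to span.set_attribute(...) that no longer fit black's default 88-character line length are split to one argument per line with a trailing comma. As a rough sketch (not part of this commit), the same transformation can be reproduced with black's Python API; the source string below only mimics the shape of the code in the hunks, and SpanAttributes is left as an unresolved placeholder, which is fine because black only parses the code:

# Sketch only: reproduce the reformatting with black's public API
# (black.format_str / black.FileMode). Requires `pip install black`.
import black

src = '''
class TraceloopLogger:
    def log_event(self, kwargs, optional_params, span):
        if "frequency_penalty" in optional_params:
            span.set_attribute(SpanAttributes.LLM_FREQUENCY_PENALTY, optional_params.get("frequency_penalty"))
'''

# FileMode() uses the default 88-character limit; the over-long call is
# exploded to one argument per line with a trailing comma, matching the
# "+" lines in the diff below.
print(black.format_str(src, mode=black.FileMode()))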
@@ -2,6 +2,7 @@ class TraceloopLogger:
     def __init__(self):
         from traceloop.sdk.tracing.tracing import TracerWrapper
         from traceloop.sdk import Traceloop
+
         Traceloop.init(app_name="Litellm-Server", disable_batch=True)
         self.tracer_wrapper = TracerWrapper()
 
@@ -29,15 +30,18 @@ class TraceloopLogger:
                     )
                 if "stop" in optional_params:
                     span.set_attribute(
-                        SpanAttributes.LLM_CHAT_STOP_SEQUENCES, optional_params.get("stop")
+                        SpanAttributes.LLM_CHAT_STOP_SEQUENCES,
+                        optional_params.get("stop"),
                     )
                 if "frequency_penalty" in optional_params:
                     span.set_attribute(
-                        SpanAttributes.LLM_FREQUENCY_PENALTY, optional_params.get("frequency_penalty")
+                        SpanAttributes.LLM_FREQUENCY_PENALTY,
+                        optional_params.get("frequency_penalty"),
                     )
                 if "presence_penalty" in optional_params:
                     span.set_attribute(
-                        SpanAttributes.LLM_PRESENCE_PENALTY, optional_params.get("presence_penalty")
+                        SpanAttributes.LLM_PRESENCE_PENALTY,
+                        optional_params.get("presence_penalty"),
                     )
                 if "top_p" in optional_params:
                     span.set_attribute(
@@ -45,7 +49,10 @@ class TraceloopLogger:
                     )
                 if "tools" in optional_params or "functions" in optional_params:
                     span.set_attribute(
-                        SpanAttributes.LLM_REQUEST_FUNCTIONS, optional_params.get("tools", optional_params.get("functions"))
+                        SpanAttributes.LLM_REQUEST_FUNCTIONS,
+                        optional_params.get(
+                            "tools", optional_params.get("functions")
+                        ),
                     )
                 if "user" in optional_params:
                     span.set_attribute(
@@ -53,7 +60,8 @@ class TraceloopLogger:
                     )
                 if "max_tokens" in optional_params:
                     span.set_attribute(
-                        SpanAttributes.LLM_REQUEST_MAX_TOKENS, kwargs.get("max_tokens")
+                        SpanAttributes.LLM_REQUEST_MAX_TOKENS,
+                        kwargs.get("max_tokens"),
                     )
                 if "temperature" in optional_params:
                     span.set_attribute(
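Taken together, the "+" lines leave every optional-parameter guard in the same shape. Below is a small, self-contained sketch of that post-formatting pattern; the helper name, its parameters, and the placeholder attribute keys are assumptions for illustration only, since in the real file these calls sit inline in TraceloopLogger.log_event and SpanAttributes comes from the semantic-conventions package that file imports:

from types import SimpleNamespace

# Placeholder keys so the sketch runs standalone; the real values belong to
# the SpanAttributes class used by the file, not to this commit.
SpanAttributes = SimpleNamespace(
    LLM_CHAT_STOP_SEQUENCES="placeholder.stop_sequences",
    LLM_FREQUENCY_PENALTY="placeholder.frequency_penalty",
    LLM_REQUEST_MAX_TOKENS="placeholder.max_tokens",
)

def set_request_attributes(span, optional_params, kwargs):
    # Hypothetical helper mirroring the guards above: each optional request
    # parameter is copied onto the span only when the caller supplied it.
    if "stop" in optional_params:
        span.set_attribute(
            SpanAttributes.LLM_CHAT_STOP_SEQUENCES,
            optional_params.get("stop"),
        )
    if "frequency_penalty" in optional_params:
        span.set_attribute(
            SpanAttributes.LLM_FREQUENCY_PENALTY,
            optional_params.get("frequency_penalty"),
        )
    if "max_tokens" in optional_params:
        # As in the diff, the guard checks optional_params but reads the
        # value from kwargs.
        span.set_attribute(
            SpanAttributes.LLM_REQUEST_MAX_TOKENS,
            kwargs.get("max_tokens"),
        )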