use litellm.forward_traceparent_to_llm_provider

This commit is contained in:
Ishaan Jaff 2024-08-01 09:05:13 -07:00
parent eb075568ac
commit 866519b659
2 changed files with 14 additions and 10 deletions

View file

@ -165,6 +165,7 @@ budget_duration: Optional[str] = (
default_soft_budget: float = (
50.0 # by default all litellm proxy keys have a soft budget of 50.0
)
forward_traceparent_to_llm_provider: bool = False
_openai_finish_reasons = ["stop", "length", "function_call", "content_filter", "null"]
_openai_completion_params = [
"functions",

View file

@ -3,6 +3,7 @@ from typing import TYPE_CHECKING, Any, Dict, Optional
from fastapi import Request
import litellm
from litellm._logging import verbose_logger, verbose_proxy_logger
from litellm.proxy._types import CommonProxyErrors, TeamCallbackMetadata, UserAPIKeyAuth
from litellm.types.utils import SupportedCacheControls
@ -250,6 +251,8 @@ def _add_otel_traceparent_to_data(data: dict, request: Request):
        # if user is not using OTEL, don't send extra_headers
# relevant issue: https://github.com/BerriAI/litellm/issues/4448
return
if litellm.forward_traceparent_to_llm_provider is True:
if request.headers:
if "traceparent" in request.headers:
# we want to forward this to the LLM Provider