Mirror of https://github.com/BerriAI/litellm.git
use litellm.forward_traceparent_to_llm_provider
parent e32c1f4c9b
commit 3ff07dfb14
2 changed files with 14 additions and 10 deletions
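This commit puts forwarding of the caller's W3C `traceparent` header behind a new module-level flag. As a minimal sketch, assuming that setting the attribute directly is all that is needed to opt in:

```python
import litellm

# forward_traceparent_to_llm_provider defaults to False (see the first hunk
# below). Enabling it tells the proxy's pre-call logic to pass the incoming
# `traceparent` header through to the downstream LLM provider, so traces can
# be correlated end to end.
litellm.forward_traceparent_to_llm_provider = True
```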
@@ -165,6 +165,7 @@ budget_duration: Optional[str] = (
 default_soft_budget: float = (
     50.0  # by default all litellm proxy keys have a soft budget of 50.0
 )
+forward_traceparent_to_llm_provider: bool = False
 _openai_finish_reasons = ["stop", "length", "function_call", "content_filter", "null"]
 _openai_completion_params = [
     "functions",
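The names in this hunk are plain module-level attributes, so they act as process-wide defaults. Purely as an illustration of that pattern (not litellm's actual config loader), a settings mapping such as the proxy's `litellm_settings` block could be applied to them roughly like this; the `settings` dict below is hypothetical:

```python
import litellm

# Hypothetical settings mapping, e.g. what a litellm_settings block might
# deserialize to. Keys mirror the module-level attributes in the hunk above.
settings = {
    "default_soft_budget": 100.0,
    "forward_traceparent_to_llm_provider": True,
}

# Apply each recognized setting as a module attribute; skip unknown keys.
for key, value in settings.items():
    if hasattr(litellm, key):
        setattr(litellm, key, value)
```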
@@ -3,6 +3,7 @@ from typing import TYPE_CHECKING, Any, Dict, Optional
 
 from fastapi import Request
 
+import litellm
 from litellm._logging import verbose_logger, verbose_proxy_logger
 from litellm.proxy._types import CommonProxyErrors, TeamCallbackMetadata, UserAPIKeyAuth
 from litellm.types.utils import SupportedCacheControls
@@ -250,6 +251,8 @@ def _add_otel_traceparent_to_data(data: dict, request: Request):
         # if user is not use OTEL don't send extra_headers
         # relevant issue: https://github.com/BerriAI/litellm/issues/4448
         return
 
+    if litellm.forward_traceparent_to_llm_provider is True:
+        if request.headers:
             if "traceparent" in request.headers:
                 # we want to forward this to the LLM Provider
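The hunk is truncated before the body of the `traceparent` branch, so the forwarding code itself is not visible here. Below is a hedged sketch of how the whole function could fit together; only the gating lines are confirmed by the diff above, and the forwarding body (copying the header into the request's `extra_headers`) is an assumption:

```python
import litellm
from fastapi import Request


def _add_otel_traceparent_to_data(data: dict, request: Request):
    # Sketch only. The real function (per the context lines above) also
    # returns early when no OpenTelemetry logger is configured.
    if data is None:
        return

    if litellm.forward_traceparent_to_llm_provider is True:
        if request.headers and "traceparent" in request.headers:
            # Assumed behavior: forward the caller's traceparent to the LLM
            # provider as a per-request extra header.
            data.setdefault("extra_headers", {})
            data["extra_headers"]["traceparent"] = request.headers["traceparent"]
```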