diff --git a/litellm/proxy/litellm_pre_call_utils.py b/litellm/proxy/litellm_pre_call_utils.py
index 2e670de85..963cdf027 100644
--- a/litellm/proxy/litellm_pre_call_utils.py
+++ b/litellm/proxy/litellm_pre_call_utils.py
@@ -144,10 +144,13 @@ async def add_litellm_data_to_request(
     )  # do not store the original `sk-..` api key in the db
     data[_metadata_variable_name]["headers"] = _headers
     data[_metadata_variable_name]["endpoint"] = str(request.url)
+
+    # OTEL Controls / Tracing
     # Add the OTEL Parent Trace before sending it LiteLLM
     data[_metadata_variable_name][
         "litellm_parent_otel_span"
     ] = user_api_key_dict.parent_otel_span
+    _add_otel_traceparent_to_data(data, request=request)
 
     ### END-USER SPECIFIC PARAMS ###
     if user_api_key_dict.allowed_model_region is not None:
@@ -169,3 +172,18 @@ async def add_litellm_data_to_request(
             }  # add the team-specific configs to the completion call
 
     return data
+
+
+def _add_otel_traceparent_to_data(data: dict, request: Request):
+    if data is None:
+        return
+    if request.headers:
+        if "traceparent" in request.headers:
+            # we want to forward this to the LLM Provider
+            # Relevant issue: https://github.com/BerriAI/litellm/issues/4419
+            # pass this in extra_headers
+            if "extra_headers" not in data:
+                data["extra_headers"] = {}
+            _exra_headers = data["extra_headers"]
+            if "traceparent" not in _exra_headers:
+                _exra_headers["traceparent"] = request.headers["traceparent"]
diff --git a/litellm/utils.py b/litellm/utils.py
index a33a160e4..88b310d70 100644
--- a/litellm/utils.py
+++ b/litellm/utils.py
@@ -3670,6 +3670,8 @@ def get_supported_openai_params(
             "tool_choice",
             "response_format",
             "seed",
+            "extra_headers",
+            "extra_body",
         ]
     elif custom_llm_provider == "deepseek":
         return [
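
A minimal sketch (not part of the patch) of the behavior the new `_add_otel_traceparent_to_data` helper introduces: when an incoming proxy request carries a W3C `traceparent` header, it is copied into `data["extra_headers"]` so the downstream provider call can continue the same trace; the `utils.py` change then lists `extra_headers`/`extra_body` as supported params so the forwarded header is not dropped. The `forward_traceparent` function below is a renamed stand-in for the patched helper, and the Starlette scope, model name, and trace ID are made-up example values.

```python
from starlette.requests import Request


def forward_traceparent(data: dict, request: Request) -> None:
    # Stand-in mirroring _add_otel_traceparent_to_data from the patch above:
    # copy an incoming `traceparent` header into data["extra_headers"] if present.
    if data is None:
        return
    if request.headers and "traceparent" in request.headers:
        extra_headers = data.setdefault("extra_headers", {})
        extra_headers.setdefault("traceparent", request.headers["traceparent"])


# Build a fake HTTP request the way Starlette would hand it to the proxy route.
scope = {
    "type": "http",
    "method": "POST",
    "path": "/chat/completions",
    "headers": [
        (b"traceparent", b"00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-01"),
    ],
    "query_string": b"",
}
request = Request(scope)

data = {"model": "gpt-3.5-turbo", "messages": [{"role": "user", "content": "hi"}]}
forward_traceparent(data, request)
print(data["extra_headers"])
# {'traceparent': '00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-01'}
```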