forked from phoenix/litellm-mirror
fix simplify - pass litellm_parent_otel_span
This commit is contained in:
parent 42df97db3c
commit 7ef7bc8a9a
6 changed files with 5 additions and 9 deletions
@@ -162,7 +162,8 @@ class OpenTelemetry(CustomLogger):
         proxy_server_request = litellm_params.get("proxy_server_request", {}) or {}
         headers = proxy_server_request.get("headers", {}) or {}
         traceparent = headers.get("traceparent", None)
-        parent_otel_span = litellm_params.get("litellm_parent_otel_span", None)
+        _metadata = litellm_params.get("metadata", {})
+        parent_otel_span = _metadata.get("litellm_parent_otel_span", None)

         """
         Two way to use parents in opentelemetry

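This first hunk moves the parent-span lookup into the request metadata: the OpenTelemetry logger no longer reads litellm_parent_otel_span directly off litellm_params, it reads it from litellm_params["metadata"]. A minimal sketch of the new lookup path, assuming only the shapes visible in the hunk (a litellm_params dict whose "metadata" entry may be absent or None); the helper name is made up for illustration:

def resolve_parent_otel_span(litellm_params: dict):
    # Mirror the lookup shown above: the span now lives under metadata.
    _metadata = litellm_params.get("metadata", {}) or {}
    return _metadata.get("litellm_parent_otel_span", None)
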
@@ -600,7 +600,6 @@ def completion(
     client = kwargs.get("client", None)
     ### Admin Controls ###
     no_log = kwargs.get("no-log", False)
-    litellm_parent_otel_span = kwargs.get("litellm_parent_otel_span", None)
     ######## end of unpacking kwargs ###########
     openai_params = [
         "functions",

@@ -690,7 +689,6 @@ def completion(
         "allowed_model_region",
         "model_config",
         "fastest_response",
-        "litellm_parent_otel_span",
     ]

     default_params = openai_params + litellm_params

@@ -875,7 +873,6 @@ def completion(
         input_cost_per_token=input_cost_per_token,
         output_cost_per_second=output_cost_per_second,
         output_cost_per_token=output_cost_per_token,
-        litellm_parent_otel_span=litellm_parent_otel_span,
     )
     logging.update_environment_variables(
         model=model,

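The three completion() hunks above drop litellm_parent_otel_span as a standalone kwarg: it is no longer unpacked from kwargs, no longer listed among the known non-OpenAI params, and no longer forwarded to get_litellm_params(). The span is instead expected to arrive inside the metadata dict. A hedged caller-side sketch (the OpenTelemetry setup and the specific model are assumptions, not part of this diff):

import litellm
from opentelemetry import trace  # assumes the opentelemetry-api package is installed

parent_span = trace.get_current_span()
response = litellm.completion(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "hi"}],
    # carry the parent span inside metadata rather than as its own kwarg
    metadata={"litellm_parent_otel_span": parent_span},
)
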
@@ -106,7 +106,7 @@ async def add_litellm_data_to_request(
     data["metadata"]["headers"] = _headers
     data["metadata"]["endpoint"] = str(request.url)
     # Add the OTEL Parent Trace before sending it LiteLLM
-    data["litellm_parent_otel_span"] = user_api_key_dict.parent_otel_span
+    data["metadata"]["litellm_parent_otel_span"] = user_api_key_dict.parent_otel_span

     ### END-USER SPECIFIC PARAMS ###
     if user_api_key_dict.allowed_model_region is not None:

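On the proxy side, the parent span recorded for the authenticated key now rides along in data["metadata"], next to the request headers that were already copied there. Those proxied headers are also where the OpenTelemetry hunk above picks up a traceparent value, the second of the "two ways" its docstring mentions. A sketch of turning such a W3C traceparent header into a parent context with the standard propagator (the header value is a made-up example):

from opentelemetry.trace.propagation.tracecontext import TraceContextTextMapPropagator

headers = {"traceparent": "00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-01"}
# extract() returns an OpenTelemetry Context usable as the parent for new spans
parent_ctx = TraceContextTextMapPropagator().extract(carrier=headers)
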
@@ -73,7 +73,8 @@ def print_verbose(print_statement):
 def safe_deep_copy(data):
     if isinstance(data, dict):
         # remove litellm_parent_otel_span since this is not picklable
-        data.pop("litellm_parent_otel_span", None)
+        if "metadata" in data and "litellm_parent_otel_span" in data["metadata"]:
+            data["metadata"].pop("litellm_parent_otel_span")
     new_data = copy.deepcopy(data)
     return new_data

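The safe_deep_copy hunk exists because, as its comment notes, the span object is not picklable: copy.deepcopy falls back to pickle-style reduction, and objects holding locks or other SDK state refuse it. With the span now stored under metadata, the pop moves there too. A small self-contained demo of the failure mode, using a threading.Lock as a stand-in for the span (an assumption for illustration; locks are likewise unpicklable):

import copy
import threading

data = {"metadata": {"litellm_parent_otel_span": threading.Lock()}}

try:
    copy.deepcopy(data)  # raises TypeError: cannot pickle '_thread.lock' object
except TypeError as err:
    print(f"deepcopy fails on the raw payload: {err}")

# strip the span the same way safe_deep_copy now does, then copy safely
data["metadata"].pop("litellm_parent_otel_span", None)
copied = copy.deepcopy(data)
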
@@ -152,7 +152,6 @@ def test_chat_completion(mock_acompletion, client_no_auth):
             specific_deployment=True,
             metadata=mock.ANY,
             proxy_server_request=mock.ANY,
-            litellm_parent_otel_span=mock.ANY,
         )
         print(f"response - {response.text}")
         assert response.status_code == 200

@@ -4918,7 +4918,6 @@ def get_litellm_params(
     input_cost_per_token=None,
     output_cost_per_token=None,
     output_cost_per_second=None,
-    litellm_parent_otel_span=None,
 ):
     litellm_params = {
         "acompletion": acompletion,

@@ -4941,7 +4940,6 @@ def get_litellm_params(
         "input_cost_per_second": input_cost_per_second,
         "output_cost_per_token": output_cost_per_token,
         "output_cost_per_second": output_cost_per_second,
-        "litellm_parent_otel_span": litellm_parent_otel_span,
     }

     return litellm_params