forked from phoenix/litellm-mirror
fix(proxy_server.py): support langfuse logging for rejected requests on /v1/chat/completions
This commit is contained in:
parent 017af34866
commit 7618ec43b3
7 changed files with 74 additions and 33 deletions
@@ -447,13 +447,24 @@ class OpenTelemetry(CustomLogger):
             # cast sr -> dict
             import json

-            _raw_response = json.loads(_raw_response)
-            for param, val in _raw_response.items():
-                if not isinstance(val, str):
-                    val = str(val)
-                span.set_attribute(
-                    f"llm.{custom_llm_provider}.{param}",
-                    val,
-                )
+            try:
+                _raw_response = json.loads(_raw_response)
+                for param, val in _raw_response.items():
+                    if not isinstance(val, str):
+                        val = str(val)
+                    span.set_attribute(
+                        f"llm.{custom_llm_provider}.{param}",
+                        val,
+                    )
+            except json.JSONDecodeError:
+                verbose_logger.debug(
+                    "litellm.integrations.opentelemetry.py::set_raw_request_attributes() - raw_response not json string - {}".format(
+                        _raw_response
+                    )
+                )
+                span.set_attribute(
+                    f"llm.{custom_llm_provider}.stringified_raw_response",
+                    _raw_response,
+                )

         pass
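For context, the hunk above changes the logger so that a raw provider response that is not valid JSON no longer raises: the decode failure is caught, logged at debug level, and the whole body is recorded as a single stringified span attribute. Below is a minimal, self-contained sketch of that fallback pattern. FakeSpan, record_raw_response, and the sample payloads are illustrative stand-ins for this sketch, not litellm code; the real logic runs inside OpenTelemetry.set_raw_request_attributes() against an actual OpenTelemetry span.

import json


class FakeSpan:
    """Illustrative stand-in for an OpenTelemetry span (assumption, not litellm code)."""

    def set_attribute(self, key: str, value: str) -> None:
        print(f"{key} = {value!r}")


def record_raw_response(span, custom_llm_provider: str, _raw_response: str) -> None:
    try:
        # Happy path: the raw response is a JSON object, so each field
        # becomes its own span attribute.
        parsed = json.loads(_raw_response)
        for param, val in parsed.items():
            if not isinstance(val, str):
                val = str(val)
            span.set_attribute(f"llm.{custom_llm_provider}.{param}", val)
    except json.JSONDecodeError:
        # Fallback: a non-JSON payload no longer crashes the logger; the
        # whole body is kept as one stringified attribute instead.
        span.set_attribute(
            f"llm.{custom_llm_provider}.stringified_raw_response",
            _raw_response,
        )


record_raw_response(FakeSpan(), "openai", '{"id": "chatcmpl-123", "created": 1}')
record_raw_response(FakeSpan(), "openai", "upstream returned: 429 Too Many Requests")

The second call exercises the new except branch: before this commit, a plain-text error body (as seen with rejected requests) would have raised json.JSONDecodeError and the response would have been dropped from the trace.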