feat(vertex_httpx.py): support logging vertex ai safety results to langfuse

Closes https://github.com/BerriAI/litellm/issues/3230
Krrish Dholakia 2024-07-26 20:50:18 -07:00
parent a7f964b869
commit fe7f78fbf6
2 changed files with 27 additions and 0 deletions


@@ -529,6 +529,7 @@ class Logging:
                    or isinstance(result, TextCompletionResponse)
                    or isinstance(result, HttpxBinaryResponseContent)  # tts
                ):
                    ## RESPONSE COST ##
                    custom_pricing = use_custom_pricing_for_model(
                        litellm_params=self.litellm_params
                    )
@@ -548,6 +549,25 @@
                            custom_pricing=custom_pricing,
                        )
                    )
                    ## HIDDEN PARAMS ##
                    if hasattr(result, "_hidden_params"):
                        # add to metadata for logging
                        if self.model_call_details.get("litellm_params") is not None:
                            self.model_call_details["litellm_params"].setdefault(
                                "metadata", {}
                            )
                            if (
                                self.model_call_details["litellm_params"]["metadata"]
                                is None
                            ):
                                self.model_call_details["litellm_params"][
                                    "metadata"
                                ] = {}
                            self.model_call_details["litellm_params"]["metadata"][
                                "hidden_params"
                            ] = result._hidden_params
                else:  # streaming chunks + image gen.
                    self.model_call_details["response_cost"] = None
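
For context, here is a minimal, self-contained sketch of what the new block does: it copies a response object's _hidden_params into litellm_params["metadata"]["hidden_params"], which is where a logging callback such as langfuse can pick up provider extras like Vertex AI safety results. The FakeResponse class, the attach_hidden_params helper, and the "vertex_ai_safety_results" key below are illustrative assumptions, not the exact litellm internals.

    # sketch.py -- illustration of the hidden-params propagation above
    from typing import Any, Dict, Optional


    class FakeResponse:
        """Stand-in for a response object carrying provider-specific hidden params."""

        def __init__(self, hidden_params: Optional[Dict[str, Any]] = None) -> None:
            self._hidden_params = hidden_params or {}


    def attach_hidden_params(model_call_details: Dict[str, Any], result: Any) -> None:
        """Copy result._hidden_params into litellm_params['metadata']['hidden_params'],
        mirroring the logic in the diff above."""
        if not hasattr(result, "_hidden_params"):
            return
        litellm_params = model_call_details.get("litellm_params")
        if litellm_params is None:
            return
        litellm_params.setdefault("metadata", {})
        if litellm_params["metadata"] is None:
            litellm_params["metadata"] = {}
        litellm_params["metadata"]["hidden_params"] = result._hidden_params


    if __name__ == "__main__":
        details = {"litellm_params": {"metadata": None}}
        response = FakeResponse(
            {
                # hypothetical key/shape for Vertex AI safety ratings
                "vertex_ai_safety_results": [
                    {"category": "HARM_CATEGORY_HARASSMENT", "probability": "NEGLIGIBLE"}
                ]
            }
        )
        attach_hidden_params(details, response)
        # A logging integration can now read the safety results from metadata.
        print(details["litellm_params"]["metadata"]["hidden_params"])

The sketch also shows why the code guards against metadata being None as well as missing: setdefault only fills the key when it is absent, so an explicit None value still has to be replaced with an empty dict before assignment.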