fix(litellm_logging.py): Fix gemini google ai studio system prompt logging

Fixes https://github.com/BerriAI/litellm/issues/2963
Krrish Dholakia 2024-06-22 09:11:23 -07:00
parent 709bd9678c
commit f814f24d9d

@@ -173,7 +173,7 @@ class Logging:
                     new_messages.append({"role": "user", "content": m})
                 messages = new_messages
         self.model = model
-        self.messages = messages
+        self.messages = copy.deepcopy(messages)
         self.stream = stream
         self.start_time = start_time  # log the call start time
         self.call_type = call_type
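
Note on the change above: a plain `self.messages = messages` aliases the caller's list, so any later in-place transformation (for example, Google AI Studio / Gemini handling pulling the system prompt out of the message list) would also rewrite what gets logged. Copying with `copy.deepcopy` snapshots the messages when the logging object is constructed. A minimal, self-contained sketch of the aliasing issue, with hypothetical variable names rather than code from the repo:

import copy

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Hi"},
]

# Shallow reference: later in-place edits to `messages` also show up in `logged`.
logged = messages
messages.pop(0)          # e.g. provider code extracting the system prompt
assert len(logged) == 1  # the system message is gone from the "log" too

# Deep copy: the logged snapshot is independent of later mutations.
messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Hi"},
]
logged = copy.deepcopy(messages)
messages.pop(0)
assert len(logged) == 2  # system prompt is preserved for logging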
@@ -263,10 +263,17 @@ class Logging:
         if headers is None:
             headers = {}
         data = additional_args.get("complete_input_dict", {})
-        api_base = additional_args.get("api_base", "")
-        self.model_call_details["litellm_params"]["api_base"] = str(
-            api_base
-        )  # used for alerting
+        api_base = str(additional_args.get("api_base", ""))
+        if "key=" in api_base:
+            # Find the position of "key=" in the string
+            key_index = api_base.find("key=") + 4
+            # Mask the last 5 characters after "key="
+            masked_api_base = (
+                api_base[:key_index] + "*" * 5 + api_base[key_index + 5 :]
+            )
+        else:
+            masked_api_base = api_base
+        self.model_call_details["litellm_params"]["api_base"] = masked_api_base
         masked_headers = {
             k: (
                 (v[:-44] + "*" * 44)