fix(litellm_pre_call_utils.py): handle dynamic keys via api correctly

This commit is contained in:
Krrish Dholakia 2024-08-21 13:36:33 -07:00
parent 842f6c54df
commit ac5c6c8751
5 changed files with 39 additions and 9 deletions

View file

@@ -210,7 +210,7 @@ class Logging:
self.optional_params = optional_params
self.model = model
self.user = user
self.litellm_params = litellm_params
self.litellm_params = scrub_sensitive_keys_in_metadata(litellm_params)
self.logger_fn = litellm_params.get("logger_fn", None)
verbose_logger.debug(f"self.optional_params: {self.optional_params}")
@@ -2353,3 +2353,28 @@ def get_standard_logging_object_payload(
"Error creating standard logging object - {}".format(str(e))
)
return None
def scrub_sensitive_keys_in_metadata(litellm_params: Optional[dict]):
    """
    Scrub sensitive values from request metadata before it reaches loggers.

    Replaces the user-supplied ``"logging"`` entry inside
    ``metadata["user_api_key_metadata"]`` with the marker string
    ``"scrubbed_by_litellm_for_sensitive_keys"`` so per-key logging
    credentials are never forwarded to logging integrations.

    Args:
        litellm_params: request params dict (may be None; treated as ``{}``).

    Returns:
        The (possibly mutated) ``litellm_params`` dict.
    """
    if litellm_params is None:
        litellm_params = {}
    metadata = litellm_params.get("metadata", {}) or {}

    ## check user_api_key_metadata for sensitive logging keys
    user_key_metadata = metadata.get("user_api_key_metadata")
    if isinstance(user_key_metadata, dict):
        # "logging" holds user-configured callback credentials — never log them
        metadata["user_api_key_metadata"] = {
            key: (
                "scrubbed_by_litellm_for_sensitive_keys"
                if key == "logging"
                else value
            )
            for key, value in user_key_metadata.items()
        }
        litellm_params["metadata"] = metadata

    return litellm_params

View file

@@ -1,4 +1,4 @@
model_list:
- model_name: ollama/mistral
- model_name: "*"
litellm_params:
model: ollama/mistral
model: "*"

View file

@@ -95,7 +95,9 @@ def convert_key_logging_metadata_to_callback(
for var, value in data.callback_vars.items():
if team_callback_settings_obj.callback_vars is None:
team_callback_settings_obj.callback_vars = {}
team_callback_settings_obj.callback_vars[var] = litellm.get_secret(value)
team_callback_settings_obj.callback_vars[var] = (
litellm.utils.get_secret(value, default_value=value) or value
)
return team_callback_settings_obj
@@ -130,7 +132,6 @@ def _get_dynamic_logging_metadata(
data=AddTeamCallback(**item),
team_callback_settings_obj=callback_settings_obj,
)
return callback_settings_obj

View file

@@ -1116,8 +1116,8 @@ async def test_add_callback_via_key_litellm_pre_call_utils(prisma_client):
"callback_name": "langfuse",
"callback_type": "success",
"callback_vars": {
"langfuse_public_key": "os.environ/LANGFUSE_PUBLIC_KEY",
"langfuse_secret_key": "os.environ/LANGFUSE_SECRET_KEY",
"langfuse_public_key": "my-mock-public-key",
"langfuse_secret_key": "my-mock-secret-key",
"langfuse_host": "https://us.cloud.langfuse.com",
},
}
@@ -1165,7 +1165,9 @@ async def test_add_callback_via_key_litellm_pre_call_utils(prisma_client):
assert "success_callback" in new_data
assert new_data["success_callback"] == ["langfuse"]
assert "langfuse_public_key" in new_data
assert new_data["langfuse_public_key"] == "my-mock-public-key"
assert "langfuse_secret_key" in new_data
assert new_data["langfuse_secret_key"] == "my-mock-secret-key"
@pytest.mark.asyncio

View file

@@ -121,7 +121,7 @@ import importlib.metadata
from openai import OpenAIError as OriginalError
from ._logging import verbose_logger
from .caching import RedisCache, RedisSemanticCache, S3Cache, QdrantSemanticCache
from .caching import QdrantSemanticCache, RedisCache, RedisSemanticCache, S3Cache
from .exceptions import (
APIConnectionError,
APIError,
@@ -8622,7 +8622,9 @@ def get_secret(
return secret_value_as_bool
else:
return secret
except:
except Exception:
if default_value is not None:
return default_value
return secret
except Exception as e:
if default_value is not None: