Litellm dev 12 24 2024 p2 (#7400)

* fix(utils.py): default custom_llm_provider=None for 'supports_response_schema'

Closes https://github.com/BerriAI/litellm/issues/7397
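
A minimal usage sketch of the fixed helper (model names here are illustrative; actual support is looked up in litellm's model cost map):

```python
import litellm

# custom_llm_provider now defaults to None, so it can be omitted and the
# provider is inferred from the model name.
print(litellm.supports_response_schema(model="gpt-4o-mini"))

# Passing the provider explicitly still works:
print(litellm.supports_response_schema(model="gemini-1.5-pro", custom_llm_provider="vertex_ai"))
```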

* refactor(langfuse/): call the Langfuse logger inside the CustomLogger-compatible Langfuse class; refactor the Langfuse logger to use verbose_logger.debug instead of print_verbose
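
A rough sketch of the shape this refactor implies - a Langfuse class that plugs into litellm's CustomLogger success/failure hooks (the class name here is hypothetical; the hook signatures are litellm's standard ones):

```python
from litellm._logging import verbose_logger
from litellm.integrations.custom_logger import CustomLogger


class LangfuseCompatibleLogger(CustomLogger):  # hypothetical name, for illustration
    """Exposes the Langfuse logger through litellm's CustomLogger interface."""

    def log_success_event(self, kwargs, response_obj, start_time, end_time):
        # debug-level logging instead of print_verbose
        verbose_logger.debug("Langfuse success event: %s", kwargs.get("litellm_call_id"))
        # ...delegate to the underlying Langfuse logger here...

    def log_failure_event(self, kwargs, response_obj, start_time, end_time):
        verbose_logger.debug("Langfuse failure event: %s", kwargs.get("litellm_call_id"))
        # ...delegate to the underlying Langfuse logger here...
```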

* refactor(litellm_pre_call_utils.py): move config-based team callbacks inside the dynamic team callback logic

enables simpler unit testing for config-based team callbacks
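
The testability win shows up in the diff below: _get_dynamic_logging_metadata now receives the proxy config as a parameter, so a test can inject a locally built ProxyConfig instead of touching module-level state. A minimal sketch (assuming these imports stay where the diff uses them):

```python
from litellm.proxy._types import UserAPIKeyAuth
from litellm.proxy.litellm_pre_call_utils import _get_dynamic_logging_metadata
from litellm.proxy.proxy_server import ProxyConfig

# Both dependencies are constructed locally - no running proxy required.
proxy_config = ProxyConfig()
user_api_key_dict = UserAPIKeyAuth(team_id="test")

callbacks = _get_dynamic_logging_metadata(
    user_api_key_dict=user_api_key_dict,
    proxy_config=proxy_config,
)
```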

* fix(proxy/_types.py): handle TeamCallbackMetadata None values

drop None values if present; if every field is None, fall back to the field defaults (empty lists / dict) to avoid downstream errors
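
A minimal sketch of that validation logic, assuming pydantic v2 (the real model is TeamCallbackMetadata in litellm/proxy/_types.py; the validator body is an assumption about the fix, and the field names come from the tests below):

```python
from typing import Dict, List, Optional

from pydantic import BaseModel, model_validator


class TeamCallbackMetadataSketch(BaseModel):  # hypothetical stand-in
    success_callback: Optional[List[str]] = []
    failure_callback: Optional[List[str]] = []
    callback_vars: Optional[Dict[str, str]] = {}

    @model_validator(mode="before")
    @classmethod
    def _drop_none_values(cls, values: dict) -> dict:
        # Drop None entries so the field defaults ([] / {}) apply,
        # avoiding downstream errors from iterating over None.
        return {k: v for k, v in (values or {}).items() if v is not None}
```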

* test(test_proxy_utils.py): add unit test preventing future regressions - asserts team_id in config state is not popped off across calls

Fixes https://github.com/BerriAI/litellm/issues/6787
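
The failure mode being guarded against: a callback handler pops team_id off the dict it gets from the config store, silently corrupting state for every later call. Handing out copies prevents this. An illustrative sketch (update_config_state/get_config_state appear in the diff below; the deepcopy is an assumption about how the fix works):

```python
import copy


class ConfigStateSketch:  # hypothetical stand-in for ProxyConfig's state handling
    def __init__(self):
        self._config: dict = {}

    def update_config_state(self, config: dict) -> None:
        self._config = config

    def get_config_state(self) -> dict:
        # Return a deep copy so callers that mutate the result (e.g. pop
        # "team_id") cannot change the stored config across requests.
        return copy.deepcopy(self._config)
```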

* fix(langfuse_prompt_management.py): add success + failure logging event support

* fix: fix linting error

* test: fix test

* test: fix test

* test: override o1 prompt caching - openai currently not working

* test: fix test

Krish Dholakia authored 2024-12-24 20:33:41 -08:00, committed by GitHub
parent d790ba0897
commit c95351e70f
12 changed files with 227 additions and 62 deletions

@@ -231,6 +231,9 @@ def test_dynamic_logging_metadata_key_and_team_metadata(callback_vars):
     os.environ["LANGFUSE_PUBLIC_KEY_TEMP"] = "pk-lf-9636b7a6-c066"
     os.environ["LANGFUSE_SECRET_KEY_TEMP"] = "sk-lf-7cc8b620"
     os.environ["LANGFUSE_HOST_TEMP"] = "https://us.cloud.langfuse.com"
+    from litellm.proxy.proxy_server import ProxyConfig
+
+    proxy_config = ProxyConfig()
     user_api_key_dict = UserAPIKeyAuth(
         token="6f8688eaff1d37555bb9e9a6390b6d7032b3ab2526ba0152da87128eab956432",
         key_name="sk-...63Fg",
@@ -288,7 +291,9 @@ def test_dynamic_logging_metadata_key_and_team_metadata(callback_vars):
         rpm_limit_per_model=None,
         tpm_limit_per_model=None,
     )
-    callbacks = _get_dynamic_logging_metadata(user_api_key_dict=user_api_key_dict)
+    callbacks = _get_dynamic_logging_metadata(
+        user_api_key_dict=user_api_key_dict, proxy_config=proxy_config
+    )
     assert callbacks is not None
@@ -308,6 +313,9 @@ def test_dynamic_logging_metadata_key_and_team_metadata(callback_vars):
     ],
 )
 def test_dynamic_turn_off_message_logging(callback_vars):
+    from litellm.proxy.proxy_server import ProxyConfig
+
+    proxy_config = ProxyConfig()
     user_api_key_dict = UserAPIKeyAuth(
         token="6f8688eaff1d37555bb9e9a6390b6d7032b3ab2526ba0152da87128eab956432",
         key_name="sk-...63Fg",
@@ -364,7 +372,9 @@ def test_dynamic_turn_off_message_logging(callback_vars):
         rpm_limit_per_model=None,
         tpm_limit_per_model=None,
     )
-    callbacks = _get_dynamic_logging_metadata(user_api_key_dict=user_api_key_dict)
+    callbacks = _get_dynamic_logging_metadata(
+        user_api_key_dict=user_api_key_dict, proxy_config=proxy_config
+    )
     assert callbacks is not None
     assert (
@@ -1008,3 +1018,89 @@ def test_get_complete_model_list(proxy_model_list, provider):
     for _model in complete_list:
         assert provider in _model
+
+
+def test_team_callback_metadata_all_none_values():
+    from litellm.proxy._types import TeamCallbackMetadata
+
+    resp = TeamCallbackMetadata(
+        success_callback=None,
+        failure_callback=None,
+        callback_vars=None,
+    )
+
+    assert resp.success_callback == []
+    assert resp.failure_callback == []
+    assert resp.callback_vars == {}
+
+
+@pytest.mark.parametrize(
+    "none_key",
+    [
+        "success_callback",
+        "failure_callback",
+        "callback_vars",
+    ],
+)
+def test_team_callback_metadata_none_values(none_key):
+    from litellm.proxy._types import TeamCallbackMetadata
+
+    if none_key == "success_callback":
+        args = {
+            "success_callback": None,
+            "failure_callback": ["test"],
+            "callback_vars": None,
+        }
+    elif none_key == "failure_callback":
+        args = {
+            "success_callback": ["test"],
+            "failure_callback": None,
+            "callback_vars": None,
+        }
+    elif none_key == "callback_vars":
+        args = {
+            "success_callback": ["test"],
+            "failure_callback": ["test"],
+            "callback_vars": None,
+        }
+
+    resp = TeamCallbackMetadata(**args)
+    assert none_key not in resp
+
+
+def test_proxy_config_state_post_init_callback_call():
+    """
+    Ensures team_id is still in config, after callback is called
+    Addresses issue: https://github.com/BerriAI/litellm/issues/6787
+    Where team_id was being popped from config, after callback was called
+    """
+    from litellm.proxy.litellm_pre_call_utils import LiteLLMProxyRequestSetup
+    from litellm.proxy.proxy_server import ProxyConfig
+
+    pc = ProxyConfig()
+
+    pc.update_config_state(
+        config={
+            "litellm_settings": {
+                "default_team_settings": [
+                    {
+                        "team_id": "test",
+                        "success_callback": ["langfuse"],
+                        "langfuse_public_key": "os.environ/LANGFUSE_PUBLIC_KEY",
+                        "langfuse_secret": "os.environ/LANGFUSE_SECRET_KEY",
+                    }
+                ]
+            }
+        }
+    )
+
+    LiteLLMProxyRequestSetup.add_team_based_callbacks_from_config(
+        team_id="test",
+        proxy_config=pc,
+    )
+
+    config = pc.get_config_state()
+    assert config["litellm_settings"]["default_team_settings"][0]["team_id"] == "test"