feat(ui): add braintrust logging to ui

commit d914aa558d (parent d17c55ba74)
Author: Krrish Dholakia
Date:   2024-07-31 21:14:55 -07:00

3 changed files with 38 additions and 8 deletions
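Before the diff, a quick orientation: the commit registers Braintrust as a selectable callback in the proxy UI, teaches /health/services to smoke-test it, and has /get/config return its (decrypted) environment variable. As a minimal sketch, assuming only the callback name and env var that appear in this commit, the underlying callback is enabled in the SDK roughly like this:

import os
import litellm

# Sketch only: "braintrust" and BRAINTRUST_API_KEY come from the diff below;
# the model is an arbitrary example.
os.environ["BRAINTRUST_API_KEY"] = "sk-bt-..."  # key the Braintrust logger reads
litellm.success_callback = ["braintrust"]       # log successful calls to Braintrust

response = litellm.completion(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "Hey, how's it going?"}],
)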

View file

@@ -1562,6 +1562,12 @@ class AllCallbacks(LiteLLMBase):
         ui_callback_name="Datadog",
     )
+    braintrust: CallbackOnUI = CallbackOnUI(
+        litellm_callback_name="braintrust",
+        litellm_callback_params=["BRAINTRUST_API_KEY"],
+        ui_callback_name="Braintrust",
+    )
 class SpendLogsMetadata(TypedDict):
     """

View file

@@ -55,7 +55,13 @@ async def test_endpoint(request: Request):
 async def health_services_endpoint(
     user_api_key_dict: UserAPIKeyAuth = Depends(user_api_key_auth),
     service: Literal[
-        "slack_budget_alerts", "langfuse", "slack", "openmeter", "webhook", "email"
+        "slack_budget_alerts",
+        "langfuse",
+        "slack",
+        "openmeter",
+        "webhook",
+        "email",
+        "braintrust",
     ] = fastapi.Query(description="Specify the service being hit."),
 ):
     """
@@ -81,6 +87,7 @@ async def health_services_endpoint(
             "slack",
             "openmeter",
             "webhook",
+            "braintrust",
         ]:
             raise HTTPException(
                 status_code=400,
@@ -89,7 +96,7 @@ async def health_services_endpoint(
                 },
             )
-        if service == "openmeter":
+        if service == "openmeter" or service == "braintrust":
             _ = await litellm.acompletion(
                 model="openai/litellm-mock-response-model",
                 messages=[{"role": "user", "content": "Hey, how's it going?"}],
@@ -98,7 +105,7 @@ async def health_services_endpoint(
             )
             return {
                 "status": "success",
-                "message": "Mock LLM request made - check openmeter.",
+                "message": "Mock LLM request made - check {}.".format(service),
             }
         if service == "langfuse":
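With "braintrust" added to the allow-list, the mock-completion branch previously reserved for openmeter now fires for Braintrust too, and the success message interpolates the service name. A quick smoke test against a running proxy, assuming the default port, an admin key, and that this handler is served at /health/services:

import requests

resp = requests.get(
    "http://localhost:4000/health/services",      # assumed default proxy address
    params={"service": "braintrust"},
    headers={"Authorization": "Bearer sk-1234"},  # your proxy admin key
)
print(resp.json())
# Per the handler above, a success looks like:
# {"status": "success", "message": "Mock LLM request made - check braintrust."}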
@@ -283,11 +290,11 @@ async def health_endpoint(
     else, the health checks will be run on models when /health is called.
     """
     from litellm.proxy.proxy_server import (
+        health_check_details,
         health_check_results,
         llm_model_list,
         use_background_health_checks,
         user_model,
-        health_check_details
     )
     try:
@@ -438,7 +445,9 @@ async def health_readiness():
     try:
         # this was returning a JSON of the values in some of the callbacks
         # all we need is the callback name, hence we do str(callback)
-        success_callback_names = [callback_name(x) for x in litellm.success_callback]
+        success_callback_names = [
+            callback_name(x) for x in litellm.success_callback
+        ]
     except AttributeError:
         # don't let this block the /health/readiness response, if we can't convert to str -> return litellm.success_callback
         success_callback_names = litellm.success_callback
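The reformatted comprehension relies on a callback_name helper defined elsewhere in this file; a hypothetical sketch of what such a helper does (the real implementation may differ):

def callback_name(callback) -> str:
    # Strings like "braintrust" pass through unchanged; logger instances and
    # plain functions are reduced to a short, readable name.
    if isinstance(callback, str):
        return callback
    return getattr(callback, "__name__", callback.__class__.__name__)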
@@ -483,12 +492,12 @@ async def health_readiness():
 @router.get(
-    "/health/liveliness", # Historical LiteLLM name; doesn't match k8s terminology but kept for backwards compatibility
+    "/health/liveliness", # Historical LiteLLM name; doesn't match k8s terminology but kept for backwards compatibility
     tags=["health"],
     dependencies=[Depends(user_api_key_auth)],
 )
 @router.get(
-    "/health/liveness", # Kubernetes has "liveness" probes (https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-startup-probes/#define-a-liveness-command)
+    "/health/liveness", # Kubernetes has "liveness" probes (https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-startup-probes/#define-a-liveness-command)
     tags=["health"],
     dependencies=[Depends(user_api_key_auth)],
 )
@@ -522,7 +531,7 @@ async def health_readiness_options():
     dependencies=[Depends(user_api_key_auth)],
 )
 @router.options(
-    "/health/liveness", # Kubernetes has "liveness" probes (https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-startup-probes/#define-a-liveness-command)
+    "/health/liveness", # Kubernetes has "liveness" probes (https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-startup-probes/#define-a-liveness-command)
     tags=["health"],
     dependencies=[Depends(user_api_key_auth)],
 )
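The two hunks above appear to be formatting-only (the visible text is unchanged); both the historical /health/liveliness spelling and the Kubernetes-style /health/liveness remain registered. A small probe, assuming the default proxy setup:

import requests

for path in ("/health/liveness", "/health/liveliness"):
    r = requests.get(
        f"http://localhost:4000{path}",               # assumed default proxy address
        headers={"Authorization": "Bearer sk-1234"},  # your proxy admin key
    )
    print(path, r.status_code)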

View file

@@ -9368,6 +9368,21 @@ async def get_config():
                    _data_to_return.append(
                        {"name": _callback, "variables": _langfuse_env_vars}
                    )
+                elif _callback == "braintrust":
+                    env_vars = [
+                        "BRAINTRUST_API_KEY",
+                    ]
+                    env_vars_dict = {}
+                    for _var in env_vars:
+                        env_variable = environment_variables.get(_var, None)
+                        if env_variable is None:
+                            env_vars_dict[_var] = None
+                        else:
+                            # decode + decrypt the value
+                            decrypted_value = decrypt_value_helper(value=env_variable)
+                            env_vars_dict[_var] = decrypted_value
+                    _data_to_return.append({"name": _callback, "variables": env_vars_dict})
         # Check if slack alerting is on
         _alerting = _general_settings.get("alerting", [])
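Net effect of this block: the config payload the UI reads now carries a Braintrust entry alongside the Langfuse one, roughly of this shape (the key value is a placeholder):

# Illustrative only: the entry appended to _data_to_return for the braintrust callback.
braintrust_entry = {
    "name": "braintrust",
    "variables": {
        "BRAINTRUST_API_KEY": "sk-bt-...",  # decrypted value, or None if never stored
    },
}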