fix(litellm_logging.py): fix lago callback logic

This commit is contained in:
Krrish Dholakia 2024-06-17 09:10:19 -07:00
parent cc066e019a
commit 2623bb260f
8 changed files with 112 additions and 102 deletions

View file

@@ -1,13 +1,19 @@
# What is this?
## On Success events log cost to Lago - https://github.com/BerriAI/litellm/issues/3639
import dotenv, os, json
import json
import os
import traceback
import uuid
from typing import Literal, Optional
import dotenv
import httpx
import litellm
import traceback, httpx
from litellm import verbose_logger
from litellm.integrations.custom_logger import CustomLogger
from litellm.llms.custom_httpx.http_handler import AsyncHTTPHandler, HTTPHandler
import uuid
from typing import Optional, Literal
def get_utc_datetime():
@@ -143,6 +149,7 @@ class LagoLogger(CustomLogger):
async def async_log_success_event(self, kwargs, response_obj, start_time, end_time):
try:
verbose_logger.debug("ENTERS LAGO CALLBACK")
_url = os.getenv("LAGO_API_BASE")
assert _url is not None and isinstance(
_url, str

View file

@@ -1,74 +1,76 @@
# What is this?
## Common Utility file for Logging handler
# Logging function -> log the exact model details + what's being sent | Non-Blocking
from litellm.types.utils import CallTypes
from typing import Optional
import copy
import datetime
import json
import os
import subprocess
import sys
import time
import traceback
import uuid
from typing import Callable, Optional
import litellm
from litellm import (
verbose_logger,
json_logs,
log_raw_request_response,
turn_off_message_logging,
verbose_logger,
)
import traceback
import litellm
import copy
import sys
import uuid
import os
from litellm.integrations.custom_logger import CustomLogger
import json
import time
from litellm.litellm_core_utils.redact_messages import (
redact_message_input_output_from_logging,
)
from litellm.utils import (
_get_base_model_from_metadata,
supabaseClient,
liteDebuggerClient,
promptLayerLogger,
weightsBiasesLogger,
langsmithLogger,
logfireLogger,
capture_exception,
add_breadcrumb,
lunaryLogger,
prometheusLogger,
print_verbose,
customLogger,
prompt_token_calculator,
)
from litellm.types.utils import (
ModelResponse,
CallTypes,
EmbeddingResponse,
ImageResponse,
TranscriptionResponse,
ModelResponse,
TextCompletionResponse,
TranscriptionResponse,
)
import subprocess
from ..integrations.traceloop import TraceloopLogger
from ..integrations.athina import AthinaLogger
from ..integrations.helicone import HeliconeLogger
from litellm.utils import (
_get_base_model_from_metadata,
add_breadcrumb,
capture_exception,
customLogger,
langsmithLogger,
liteDebuggerClient,
logfireLogger,
lunaryLogger,
print_verbose,
prometheusLogger,
prompt_token_calculator,
promptLayerLogger,
supabaseClient,
weightsBiasesLogger,
)
from ..integrations.aispend import AISpendLogger
from ..integrations.athina import AthinaLogger
from ..integrations.berrispend import BerriSpendLogger
from ..integrations.supabase import Supabase
from ..integrations.lunary import LunaryLogger
from ..integrations.prompt_layer import PromptLayerLogger
from ..integrations.langsmith import LangsmithLogger
from ..integrations.logfire_logger import LogfireLogger, LogfireLevel
from ..integrations.weights_biases import WeightsBiasesLogger
from ..integrations.clickhouse import ClickhouseLogger
from ..integrations.custom_logger import CustomLogger
from ..integrations.langfuse import LangFuseLogger
from ..integrations.openmeter import OpenMeterLogger
from ..integrations.lago import LagoLogger
from ..integrations.datadog import DataDogLogger
from ..integrations.dynamodb import DyanmoDBLogger
from ..integrations.greenscale import GreenscaleLogger
from ..integrations.helicone import HeliconeLogger
from ..integrations.lago import LagoLogger
from ..integrations.langfuse import LangFuseLogger
from ..integrations.langsmith import LangsmithLogger
from ..integrations.litedebugger import LiteDebugger
from ..integrations.logfire_logger import LogfireLevel, LogfireLogger
from ..integrations.lunary import LunaryLogger
from ..integrations.openmeter import OpenMeterLogger
from ..integrations.prometheus import PrometheusLogger
from ..integrations.prometheus_services import PrometheusServicesLogger
from ..integrations.dynamodb import DyanmoDBLogger
from ..integrations.prompt_layer import PromptLayerLogger
from ..integrations.s3 import S3Logger
from ..integrations.clickhouse import ClickhouseLogger
from ..integrations.greenscale import GreenscaleLogger
from ..integrations.litedebugger import LiteDebugger
from ..integrations.supabase import Supabase
from ..integrations.traceloop import TraceloopLogger
from ..integrations.weights_biases import WeightsBiasesLogger
class Logging:
@@ -1778,3 +1780,12 @@ def set_callbacks(callback_list, function_id=None):
customLogger = CustomLogger()
except Exception as e:
raise e
def _init_custom_logger_compatible_class(
logging_integration: litellm._custom_logger_compatible_callbacks_literal,
) -> Callable:
if logging_integration == "lago":
return LagoLogger() # type: ignore
elif logging_integration == "openmeter":
return OpenMeterLogger() # type: ignore

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View file

@@ -75,6 +75,7 @@ router_settings:
enable_pre_call_checks: true
litellm_settings:
callbacks: ["lago"]
success_callback: ["langfuse"]
failure_callback: ["langfuse"]
cache: true

View file

@@ -1,56 +1,53 @@
from typing import Optional, List, Any, Literal, Union, TYPE_CHECKING, Tuple
import os
import subprocess
import hashlib
import importlib
import asyncio
import copy
import hashlib
import importlib
import json
import httpx
import os
import re
import smtplib
import subprocess
import time
import litellm
import backoff
import traceback
from datetime import datetime, timedelta
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from functools import wraps
from typing import TYPE_CHECKING, Any, List, Literal, Optional, Tuple, Union
import backoff
import httpx
from fastapi import HTTPException, Request, status
from pydantic import BaseModel
from typing_extensions import overload
import litellm
import litellm.litellm_core_utils
import litellm.litellm_core_utils.litellm_logging
from litellm import EmbeddingResponse, ImageResponse, ModelResponse
from litellm._logging import verbose_proxy_logger
from litellm._service_logger import ServiceLogging, ServiceTypes
from litellm.caching import DualCache, RedisCache
from litellm.exceptions import RejectedRequestError
from litellm.integrations.custom_logger import CustomLogger
from litellm.integrations.slack_alerting import SlackAlerting
from litellm.llms.custom_httpx.httpx_handler import HTTPHandler
from litellm.proxy._types import (
UserAPIKeyAuth,
AlertType,
CallInfo,
DynamoDBArgs,
LiteLLM_VerificationTokenView,
CallInfo,
AlertType,
ResetTeamBudgetRequest,
LitellmUserRoles,
ResetTeamBudgetRequest,
SpendLogsMetadata,
SpendLogsPayload,
UserAPIKeyAuth,
)
from litellm.caching import DualCache, RedisCache
from litellm.llms.custom_httpx.httpx_handler import HTTPHandler
from litellm.proxy.hooks.cache_control_check import _PROXY_CacheControlCheck
from litellm.proxy.hooks.max_budget_limiter import _PROXY_MaxBudgetLimiter
from litellm.proxy.hooks.parallel_request_limiter import (
_PROXY_MaxParallelRequestsHandler,
)
from litellm.exceptions import RejectedRequestError
from litellm._service_logger import ServiceLogging, ServiceTypes
from litellm import (
ModelResponse,
EmbeddingResponse,
ImageResponse,
)
from litellm.proxy.hooks.max_budget_limiter import _PROXY_MaxBudgetLimiter
from litellm.proxy.hooks.cache_control_check import _PROXY_CacheControlCheck
from litellm.integrations.custom_logger import CustomLogger
from litellm._logging import verbose_proxy_logger
from fastapi import HTTPException, status
import smtplib
import re
from email.mime.text import MIMEText
from email.mime.multipart import MIMEMultipart
from datetime import datetime, timedelta
from litellm.integrations.slack_alerting import SlackAlerting
from typing_extensions import overload
from functools import wraps
from fastapi import Request
if TYPE_CHECKING:
from opentelemetry.trace import Span as _Span
@@ -244,7 +241,9 @@ class ProxyLogging:
)
for callback in litellm.callbacks:
if isinstance(callback, str):
callback = litellm.utils._init_custom_logger_compatible_class(callback)
callback = litellm.litellm_core_utils.litellm_logging._init_custom_logger_compatible_class(
callback
)
if callback not in litellm.input_callback:
litellm.input_callback.append(callback)
if callback not in litellm.success_callback:
@@ -1957,8 +1956,7 @@ async def send_email(receiver_email, subject, html):
sender_email,
"""
## SERVER SETUP ##
from litellm.proxy.proxy_server import premium_user
from litellm.proxy.proxy_server import CommonProxyErrors
from litellm.proxy.proxy_server import CommonProxyErrors, premium_user
smtp_host = os.getenv("SMTP_HOST")
smtp_port = int(os.getenv("SMTP_PORT", "587")) # default to port 587
@@ -2008,9 +2006,10 @@ def hash_token(token: str):
def get_logging_payload(
kwargs, response_obj, start_time, end_time, end_user_id: Optional[str]
) -> SpendLogsPayload:
from litellm.proxy._types import LiteLLM_SpendLogs
from pydantic import Json
from litellm.proxy._types import LiteLLM_SpendLogs
verbose_proxy_logger.debug(
f"SpendTable: get_logging_payload - kwargs: {kwargs}\n\n"
)
@@ -2762,6 +2761,7 @@ def _is_valid_team_configs(team_id=None, team_config=None, request_data=None):
def encrypt_value(value: str, master_key: str):
import hashlib
import nacl.secret
import nacl.utils
@@ -2782,6 +2782,7 @@ def encrypt_value(value: str, master_key: str):
def decrypt_value(value: bytes, master_key: str) -> str:
import hashlib
import nacl.secret
import nacl.utils

View file

@@ -327,15 +327,6 @@ class Rules:
return True
def _init_custom_logger_compatible_class(
logging_integration: litellm._custom_logger_compatible_callbacks_literal,
) -> Callable:
if logging_integration == "lago":
return LagoLogger() # type: ignore
elif logging_integration == "openmeter":
return OpenMeterLogger() # type: ignore
####### CLIENT ###################
# make it easy to log if completion/embedding runs succeeded or failed + see what happened | Non-Blocking
def function_setup(
@@ -353,7 +344,9 @@ def function_setup(
for callback in litellm.callbacks:
# check if callback is a string - e.g. "lago", "openmeter"
if isinstance(callback, str):
callback = _init_custom_logger_compatible_class(callback)
callback = litellm.litellm_core_utils.litellm_logging._init_custom_logger_compatible_class(
callback
)
if any(
isinstance(cb, type(callback))
for cb in litellm._async_success_callback