forked from phoenix/litellm-mirror
try/except completion_cost + custom logger func
parent 73c25de950
commit 7dc694c704
2 changed files with 80 additions and 30 deletions
litellm/integrations/custom_logger.py  (new file, 36 additions)

@@ -0,0 +1,36 @@
+#### What this does ####
+# On success, logs events to Promptlayer
+import dotenv, os
+import requests
+import requests
+
+dotenv.load_dotenv()  # Loading env variables using dotenv
+import traceback
+
+
+class CustomLogger:
+    # Class variables or attributes
+    def __init__(self, callback_func):
+        # Instance variables
+        self.callback_func = callback_func
+
+    def log_event(self, kwargs, response_obj, start_time, end_time, print_verbose):
+        # Method definition
+        try:
+            print_verbose(
+                f"Custom Logger - Enters logging function for model {kwargs}"
+            )
+            self.callback_func(
+                kwargs,  # kwargs to func
+                response_obj,
+                start_time,
+                end_time,
+                print_verbose
+            )
+            print_verbose(
+                f"Custom Logger - final response object: {response_obj}"
+            )
+        except:
+            # traceback.print_exc()
+            print_verbose(f"Custom Logger Error - {traceback.format_exc()}")
+            pass
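For orientation, a minimal sketch of how this class is meant to be driven. The function my_logging_fn and the sample values are illustrative, not part of the commit; the only contract taken from the diff is that the wrapped callable receives kwargs, response_obj, start_time, end_time and print_verbose, in that order.

# Illustrative only: my_logging_fn and the sample values are made up;
# the argument order mirrors log_event() above.
import datetime
from litellm.integrations.custom_logger import CustomLogger

def my_logging_fn(kwargs, response_obj, start_time, end_time, print_verbose):
    duration = (end_time - start_time).total_seconds()
    print_verbose(f"model={kwargs.get('model')} took {duration:.2f}s")

logger = CustomLogger(callback_func=my_logging_fn)
logger.log_event(
    kwargs={"model": "gpt-3.5-turbo"},
    response_obj={"choices": []},
    start_time=datetime.datetime.now(),
    end_time=datetime.datetime.now(),
    print_verbose=print,
)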
@@ -17,6 +17,7 @@ from .integrations.berrispend import BerriSpendLogger
 from .integrations.supabase import Supabase
 from .integrations.llmonitor import LLMonitorLogger
 from .integrations.prompt_layer import PromptLayerLogger
+from .integrations.custom_logger import CustomLogger
 from .integrations.langfuse import LangFuseLogger
 from .integrations.litedebugger import LiteDebugger
 from openai.error import OpenAIError as OriginalError
@@ -46,6 +47,7 @@ slack_app = None
 alerts_channel = None
 heliconeLogger = None
 promptLayerLogger = None
+customLogger = None
 langFuseLogger = None
 llmonitorLogger = None
 aispendLogger = None
@@ -677,7 +679,7 @@ def completion_cost(
         completion="",
         total_time=0.0, # used for replicate
     ):
-    # Handle Inputs to completion_cost
-    prompt_tokens = 0
-    completion_tokens = 0
+    try:
+        # Handle Inputs to completion_cost
+        prompt_tokens = 0
+        completion_tokens = 0
@@ -706,6 +708,8 @@ def completion_cost(
-        model=model, prompt_tokens=prompt_tokens, completion_tokens=completion_tokens
-    )
-    return prompt_tokens_cost_usd_dollar + completion_tokens_cost_usd_dollar
+            model=model, prompt_tokens=prompt_tokens, completion_tokens=completion_tokens
+        )
+        return prompt_tokens_cost_usd_dollar + completion_tokens_cost_usd_dollar
+    except:
+        return 0.0 # this should not block a users execution path
 
 ####### HELPER FUNCTIONS ################
 def get_litellm_params(
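The point of the new try/except is that a failed cost lookup now degrades to 0.0 instead of propagating an exception into the caller's path. A rough sketch of the intended behaviour, assuming the model/prompt/completion keyword parameters completion_cost takes in this version (the model strings are illustrative):

from litellm import completion_cost

# A model that exists in litellm's cost map should yield a small positive float.
known_cost = completion_cost(model="gpt-3.5-turbo", prompt="hello", completion="hi there")

# A model the cost map cannot resolve is expected to fall into the new except
# branch and come back as 0.0 rather than raising.
unknown_cost = completion_cost(model="not-a-real-model", prompt="hello", completion="hi there")

print(known_cost)    # small positive float
print(unknown_cost)  # 0.0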
@@ -993,7 +997,7 @@ def validate_environment():
     return api_key
 
 def set_callbacks(callback_list, function_id=None):
-    global sentry_sdk_instance, capture_exception, add_breadcrumb, posthog, slack_app, alerts_channel, traceloopLogger, heliconeLogger, aispendLogger, berrispendLogger, supabaseClient, liteDebuggerClient, llmonitorLogger, promptLayerLogger, langFuseLogger
+    global sentry_sdk_instance, capture_exception, add_breadcrumb, posthog, slack_app, alerts_channel, traceloopLogger, heliconeLogger, aispendLogger, berrispendLogger, supabaseClient, liteDebuggerClient, llmonitorLogger, promptLayerLogger, langFuseLogger, customLogger
     try:
         for callback in callback_list:
             print_verbose(f"callback: {callback}")
@@ -1073,6 +1077,8 @@ def set_callbacks(callback_list, function_id=None):
                     liteDebuggerClient = LiteDebugger(email=litellm.email)
                 else:
                     liteDebuggerClient = LiteDebugger(email=str(uuid.uuid4()))
+            elif callable(callback):
+                customLogger = CustomLogger()
         except Exception as e:
             raise e
 
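This is the branch that lets users put a plain Python function next to the string-named integrations in their callback list: anything callable is routed to CustomLogger rather than looked up by name. A minimal registration sketch, assuming callbacks are fed to set_callbacks via litellm.success_callback as elsewhere in this file (track_tokens is illustrative). Note that, as committed, CustomLogger() is constructed here without the callback_func argument its __init__ requires, so a follow-up change presumably threads the callable through.

import litellm

# Illustrative only: track_tokens is a user-defined function, not part of the commit.
def track_tokens(kwargs, response_obj, start_time, end_time, print_verbose):
    usage = getattr(response_obj, "usage", None)
    print_verbose(f"usage for {kwargs.get('model')}: {usage}")

# String entries keep resolving to named integrations; the callable entry is
# what the new `elif callable(callback)` branch above is meant to catch.
litellm.success_callback = ["promptlayer", track_tokens]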
@@ -1366,6 +1372,14 @@ def handle_success(args, kwargs, result, start_time, end_time):
                     litellm_call_id=kwargs["litellm_call_id"],
                     print_verbose=print_verbose,
                 )
+            elif callable(callback): # custom logger functions
+                customLogger.log_event(
+                    kwargs=kwargs,
+                    response_obj=result,
+                    start_time=start_time,
+                    end_time=end_time,
+                    print_verbose=print_verbose,
+                )
         except Exception as e:
             # LOGGING
             exception_logging(logger_fn=user_logger_fn, exception=e)
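On the success path this branch forwards the finished call into the user's function via customLogger.log_event, passing the original request kwargs, the response object and the timing as keyword arguments. An end-to-end sketch of what that looks like from the caller's side, assuming a working CustomLogger registration (model, message and callback body are illustrative):

import litellm

def on_success(kwargs, response_obj, start_time, end_time, print_verbose):
    # Receives the request kwargs, the completion response and the timing
    # that handle_success() hands to customLogger.log_event() above.
    print(f"{kwargs.get('model')} succeeded; started {start_time}, finished {end_time}")

litellm.success_callback = [on_success]

response = litellm.completion(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "hello"}],
)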