(feat) proxy-read litellm custom callback class

This commit is contained in:
ishaan-jaff 2023-12-04 09:09:54 -08:00
parent ed8c666922
commit 65e00b438e
3 changed files with 34 additions and 1 deletion


@@ -1,4 +1,5 @@
 from litellm.integrations.custom_logger import CustomLogger
+import litellm
 class MyCustomHandler(CustomLogger):
     def log_pre_api_call(self, model, messages, kwargs):
         print(f"Pre-API Call")
@@ -6,6 +7,16 @@ class MyCustomHandler(CustomLogger):
     def log_post_api_call(self, kwargs, response_obj, start_time, end_time):
         # log: key, user, model, prompt, response, tokens, cost
         print(f"Post-API Call")
+        print("\n kwargs\n")
+        print(kwargs)
+        model = kwargs["model"]
+        messages = kwargs["messages"]
+        cost = litellm.completion_cost(completion_response=response_obj)
+        # tokens used in response
+        usage = response_obj.usage
+        print(usage)

     def log_stream_event(self, kwargs, response_obj, start_time, end_time):
         print(f"On Stream")
@@ -16,4 +27,6 @@ class MyCustomHandler(CustomLogger):
     def log_failure_event(self, kwargs, response_obj, start_time, end_time):
         print(f"On Failure")

-customHandler = MyCustomHandler()
+proxy_handler_instance = MyCustomHandler()
+
+# need to set litellm.callbacks = [proxy_handler_instance] # on the proxy
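
For context, below is a minimal sketch (not part of this commit) of how a handler like the one above can be exercised directly through the litellm SDK, assuming it lives in a custom_callbacks.py module; the module path, model name, and messages are placeholder values.

# Sketch only: register the custom handler and trigger its log_* hooks via the SDK.
import litellm
from custom_callbacks import proxy_handler_instance  # hypothetical module path

# Registering the CustomLogger instance makes litellm invoke its hooks
# (log_pre_api_call, log_post_api_call, log_success_event, ...) around each call.
litellm.callbacks = [proxy_handler_instance]

response = litellm.completion(
    model="gpt-3.5-turbo",  # placeholder model
    messages=[{"role": "user", "content": "hello"}],
)
print(response)

On the proxy itself the handler is usually not registered in code like this; instead the proxy config references the instance (for example, a litellm_settings / callbacks entry pointing at custom_callbacks.proxy_handler_instance), which appears to be the "proxy-read" behavior this commit is building toward.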