Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-26 03:04:13 +00:00
fix(__init__.py): add 'log_raw_request_response' flag to init
parent 9c7788ae48
commit 6305d2dbcf
2 changed files with 6 additions and 3 deletions
@@ -61,6 +61,7 @@ pre_call_rules: List[Callable] = []
 post_call_rules: List[Callable] = []
 turn_off_message_logging: Optional[bool] = False
 redact_messages_in_exceptions: Optional[bool] = False
+log_raw_request_response: bool = False
 store_audit_logs = False  # Enterprise feature, allow users to see audit logs
 ## end of callbacks #############

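For readers skimming the first hunk: 'log_raw_request_response' is added as a plain module-level boolean alongside the other callback/logging flags, so turning it on should be a simple attribute assignment on the litellm module. A minimal usage sketch, assuming the flag is honored at request time; the model name and message below are placeholders, not part of this commit:

import litellm

# Enable the new flag the same way the neighbouring module-level flags are toggled.
litellm.log_raw_request_response = True   # added by this commit, defaults to False
litellm.set_verbose = True                # verbose logging, as used in the test hunk below

# Placeholder request; any litellm.completion() call would exercise the logging path.
response = litellm.completion(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "hello"}],
)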
@@ -34,14 +34,15 @@ class MyCustomHandler(CustomLogger):
         self.response_cost = 0

     def log_pre_api_call(self, model, messages, kwargs):
-        print(f"Pre-API Call")
+        print("Pre-API Call")
+        traceback.print_stack()
         self.data_sent_to_api = kwargs["additional_args"].get("complete_input_dict", {})

     def log_post_api_call(self, kwargs, response_obj, start_time, end_time):
-        print(f"Post-API Call")
+        print("Post-API Call")

     def log_stream_event(self, kwargs, response_obj, start_time, end_time):
-        print(f"On Stream")
+        print("On Stream")

     def log_success_event(self, kwargs, response_obj, start_time, end_time):
         print(f"On Success")
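The second hunk is easier to follow with the registration pattern in mind: MyCustomHandler subclasses CustomLogger, gets attached via litellm.callbacks, and its log_pre_api_call hook captures the fully-built provider payload from kwargs["additional_args"]["complete_input_dict"]. A condensed sketch of that pattern; the import path is an assumption based on litellm's integrations package, the rest mirrors the diff:

import litellm
from litellm.integrations.custom_logger import CustomLogger  # assumed import path


class PayloadCapture(CustomLogger):
    """Minimal stand-in for MyCustomHandler: records the request body sent to the provider."""

    def __init__(self):
        self.data_sent_to_api = {}

    def log_pre_api_call(self, model, messages, kwargs):
        # 'complete_input_dict' is the fully-built request payload, per the hunk above.
        self.data_sent_to_api = kwargs["additional_args"].get("complete_input_dict", {})


handler = PayloadCapture()
litellm.callbacks = [handler]  # same registration style the test uses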
@@ -372,6 +373,7 @@ async def test_async_custom_handler_embedding_optional_param():
     Tests if the openai optional params for embedding - user + encoding_format,
     are logged
     """
+    litellm.set_verbose = True
     customHandler_optional_params = MyCustomHandler()
     litellm.callbacks = [customHandler_optional_params]
     response = await litellm.aembedding(
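The aembedding call at the end of this hunk is truncated in the view above. Based on the docstring, the optional params under test are user and encoding_format, so the call presumably passes both. A hypothetical reconstruction of that call shape, not the literal test body (model and input values are placeholders):

import asyncio
import litellm


async def embedding_with_optional_params():
    # Hypothetical call; 'user' and 'encoding_format' are the optional params the test checks.
    response = await litellm.aembedding(
        model="text-embedding-ada-002",   # placeholder model
        input=["hello world"],            # placeholder input
        user="test-user",
        encoding_format="float",
    )
    return response


# asyncio.run(embedding_with_optional_params())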