From f744445db4efdebb7fdd8924f08a2371d5899b28 Mon Sep 17 00:00:00 2001
From: ishaan-jaff
Date: Thu, 7 Dec 2023 17:31:12 -0800
Subject: [PATCH] (fix) make print_verbose non-blocking

---
 litellm/_logging.py           | 7 +++++--
 litellm/budget_manager.py     | 9 ++++++---
 litellm/caching.py            | 7 +++++--
 litellm/main.py               | 7 +++++--
 litellm/proxy/proxy_server.py | 9 ++++++---
 litellm/router.py             | 7 +++++--
 litellm/utils.py              | 7 +++++--
 7 files changed, 37 insertions(+), 16 deletions(-)

diff --git a/litellm/_logging.py b/litellm/_logging.py
index ab776b9b61..0c68814064 100644
--- a/litellm/_logging.py
+++ b/litellm/_logging.py
@@ -1,5 +1,8 @@
 set_verbose = False
 
 def print_verbose(print_statement):
-    if set_verbose:
-        print(print_statement) # noqa
\ No newline at end of file
+    try:
+        if set_verbose:
+            print(print_statement) # noqa
+    except Exception:
+        pass
\ No newline at end of file
diff --git a/litellm/budget_manager.py b/litellm/budget_manager.py
index 6a9d1e5207..07468e2f50 100644
--- a/litellm/budget_manager.py
+++ b/litellm/budget_manager.py
@@ -13,9 +13,12 @@ class BudgetManager:
         self.load_data()
 
     def print_verbose(self, print_statement):
-        if litellm.set_verbose:
-            import logging
-            logging.info(print_statement)
+        try:
+            if litellm.set_verbose:
+                import logging
+                logging.info(print_statement)
+        except Exception:
+            pass
 
     def load_data(self):
         if self.client_type == "local":
diff --git a/litellm/caching.py b/litellm/caching.py
index 1b6963cc67..4f000689cd 100644
--- a/litellm/caching.py
+++ b/litellm/caching.py
@@ -25,8 +25,11 @@ def get_prompt(*args, **kwargs):
     return None
 
 def print_verbose(print_statement):
-    if litellm.set_verbose:
-        print(print_statement) # noqa
+    try:
+        if litellm.set_verbose:
+            print(print_statement) # noqa
+    except Exception:
+        pass
 
 class BaseCache:
     def set_cache(self, key, value, **kwargs):
diff --git a/litellm/main.py b/litellm/main.py
index 6d7fc34039..76eae2b1a4 100644
--- a/litellm/main.py
+++ b/litellm/main.py
@@ -2141,8 +2141,11 @@ def moderation(input: str, api_key: Optional[str]=None):
 ####### HELPER FUNCTIONS ################
 ## Set verbose to true -> ```litellm.set_verbose = True```
 def print_verbose(print_statement):
-    if litellm.set_verbose:
-        print(print_statement) # noqa
+    try:
+        if litellm.set_verbose:
+            print(print_statement) # noqa
+    except Exception:
+        pass
 
 def config_completion(**kwargs):
     if litellm.config_path != None:
diff --git a/litellm/proxy/proxy_server.py b/litellm/proxy/proxy_server.py
index 6f250df5d1..b4763c11cf 100644
--- a/litellm/proxy/proxy_server.py
+++ b/litellm/proxy/proxy_server.py
@@ -199,9 +199,12 @@ celery_app_conn = None
 celery_fn = None # Redis Queue for handling requests
 #### HELPER FUNCTIONS ####
 def print_verbose(print_statement):
-    global user_debug
-    if user_debug:
-        print(print_statement)
+    try:
+        global user_debug
+        if user_debug:
+            print(print_statement)
+    except Exception:
+        pass
 
 def usage_telemetry(
     feature: str,
diff --git a/litellm/router.py b/litellm/router.py
index 9bdf69f93e..04f518b2d3 100644
--- a/litellm/router.py
+++ b/litellm/router.py
@@ -1076,8 +1076,11 @@ class Router:
         return deployment.get("client", None)
 
     def print_verbose(self, print_statement):
-        if self.set_verbose or litellm.set_verbose:
-            print(f"LiteLLM.Router: {print_statement}") # noqa
+        try:
+            if self.set_verbose or litellm.set_verbose:
+                print(f"LiteLLM.Router: {print_statement}") # noqa
+        except Exception:
+            pass
 
     def get_available_deployment(self,
                                  model: str,
diff --git a/litellm/utils.py b/litellm/utils.py
index f2c363fb42..4b64caa8bd 100644
--- a/litellm/utils.py
+++ b/litellm/utils.py
@@ -512,8 +512,11 @@ class TextCompletionResponse(OpenAIObject):
 ############################################################
 
 def print_verbose(print_statement):
-    if litellm.set_verbose:
-        print(print_statement) # noqa
+    try:
+        if litellm.set_verbose:
+            print(print_statement) # noqa
+    except Exception:
+        pass
 
 ####### LOGGING ###################
 from enum import Enum