(fix) make print_verbose non-blocking

This commit is contained in:
ishaan-jaff 2023-12-07 17:31:12 -08:00
parent d2a53f05ed
commit f744445db4
7 changed files with 37 additions and 16 deletions

View file

@@ -1,5 +1,8 @@
set_verbose = False
def print_verbose(print_statement):
if set_verbose:
print(print_statement) # noqa
try:
if set_verbose:
print(print_statement) # noqa
except:
pass

View file

@@ -13,9 +13,12 @@ class BudgetManager:
self.load_data()
def print_verbose(self, print_statement):
if litellm.set_verbose:
import logging
logging.info(print_statement)
try:
if litellm.set_verbose:
import logging
logging.info(print_statement)
except:
pass
def load_data(self):
if self.client_type == "local":

View file

@@ -25,8 +25,11 @@ def get_prompt(*args, **kwargs):
return None
def print_verbose(print_statement):
if litellm.set_verbose:
print(print_statement) # noqa
try:
if litellm.set_verbose:
print(print_statement) # noqa
except:
pass
class BaseCache:
def set_cache(self, key, value, **kwargs):

View file

@@ -2141,8 +2141,11 @@ def moderation(input: str, api_key: Optional[str]=None):
####### HELPER FUNCTIONS ################
## Set verbose to true -> ```litellm.set_verbose = True```
def print_verbose(print_statement):
if litellm.set_verbose:
print(print_statement) # noqa
try:
if litellm.set_verbose:
print(print_statement) # noqa
except:
pass
def config_completion(**kwargs):
if litellm.config_path != None:

View file

@@ -199,9 +199,12 @@ celery_app_conn = None
celery_fn = None # Redis Queue for handling requests
#### HELPER FUNCTIONS ####
def print_verbose(print_statement):
global user_debug
if user_debug:
print(print_statement)
try:
global user_debug
if user_debug:
print(print_statement)
except:
pass
def usage_telemetry(
feature: str,

View file

@@ -1076,8 +1076,11 @@ class Router:
return deployment.get("client", None)
def print_verbose(self, print_statement):
if self.set_verbose or litellm.set_verbose:
print(f"LiteLLM.Router: {print_statement}") # noqa
try:
if self.set_verbose or litellm.set_verbose:
print(f"LiteLLM.Router: {print_statement}") # noqa
except:
pass
def get_available_deployment(self,
model: str,

View file

@@ -512,8 +512,11 @@ class TextCompletionResponse(OpenAIObject):
############################################################
def print_verbose(print_statement):
if litellm.set_verbose:
print(print_statement) # noqa
try:
if litellm.set_verbose:
print(print_statement) # noqa
except:
pass
####### LOGGING ###################
from enum import Enum