fix opt-out of litellm client

This commit is contained in:
ishaan-jaff 2023-08-24 12:42:07 -07:00
parent 46332876b4
commit 4c081674fe
2 changed files with 2 additions and 2 deletions

View file

@@ -24,7 +24,7 @@ togetherai_api_key: Optional[str] = None
caching = False
caching_with_models = False # if you want the caching key to be model + prompt
model_alias_map: Dict[str, str] = {}
client = True
use_client = True
model_cost = {
"babbage-002": {
"max_tokens": 16384,

View file

@@ -304,7 +304,7 @@ def client(original_function):
): # just run once to check if user wants to send their data anywhere - PostHog/Sentry/Slack/etc.
try:
global callback_list, add_breadcrumb, user_logger_fn
if litellm.client: # enable users to opt-out of the debugging dashboard by setting `litellm.client = False`
if litellm.use_client: # enable users to opt-out of the debugging dashboard by setting `litellm.client = False`
if litellm.email is not None or os.getenv("LITELLM_EMAIL", None) is not None or litellm.token is not None or os.getenv("LITELLM_TOKEN", None): # add to input, success and failure callbacks if user is using hosted product
get_all_keys()
if "lite_debugger" not in callback_list: