forked from phoenix/litellm-mirror
Enable LiteDebugger opt-out
This commit is contained in:
parent
254dcce39f
commit
efbb476020
4 changed files with 12 additions and 14 deletions
|
@ -24,7 +24,7 @@ togetherai_api_key: Optional[str] = None
|
|||
caching = False
|
||||
caching_with_models = False # if you want the caching key to be model + prompt
|
||||
model_alias_map: Dict[str, str] = {}
|
||||
debugger = False
|
||||
client = True
|
||||
model_cost = {
|
||||
"babbage-002": {
|
||||
"max_tokens": 16384,
|
||||
|
|
|
@ -12,8 +12,6 @@ import pytest
|
|||
import litellm
|
||||
from litellm import embedding, completion
|
||||
|
||||
litellm.debugger = True
|
||||
|
||||
# from infisical import InfisicalClient
|
||||
|
||||
# litellm.set_verbose = True
|
||||
|
|
|
@ -304,6 +304,7 @@ def client(original_function):
|
|||
): # just run once to check if user wants to send their data anywhere - PostHog/Sentry/Slack/etc.
|
||||
try:
|
||||
global callback_list, add_breadcrumb, user_logger_fn
|
||||
if litellm.client: # enable users to opt-out of the debugging dashboard by setting `litellm.client = False`
|
||||
if litellm.email is not None or os.getenv("LITELLM_EMAIL", None) is not None or litellm.token is not None or os.getenv("LITELLM_TOKEN", None): # add to input, success and failure callbacks if user is using hosted product
|
||||
get_all_keys()
|
||||
if "lite_debugger" not in callback_list:
|
||||
|
@ -316,7 +317,6 @@ def client(original_function):
|
|||
litellm.input_callback.append("lite_debugger")
|
||||
litellm.success_callback.append("lite_debugger")
|
||||
litellm.failure_callback.append("lite_debugger")
|
||||
|
||||
if (
|
||||
len(litellm.input_callback) > 0
|
||||
or len(litellm.success_callback) > 0
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
[tool.poetry]
|
||||
name = "litellm"
|
||||
version = "0.1.477"
|
||||
version = "0.1.478"
|
||||
description = "Library to easily interface with LLM API providers"
|
||||
authors = ["BerriAI"]
|
||||
license = "MIT License"
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue