mirror of https://github.com/BerriAI/litellm.git
synced 2025-04-27 03:34:10 +00:00
Merge pull request #5058 from BerriAI/litellm_add_debug_statements_connecting_prisma

Fix - add debug statements when connecting to prisma DB

Commit 4c4cc1ba23
3 changed files with 75 additions and 2 deletions
@@ -1,9 +1,11 @@
 # Start tracing memory allocations
+import json
+import os
 import tracemalloc
 
 from fastapi import APIRouter
 
 import litellm
 from litellm._logging import verbose_proxy_logger
 
 router = APIRouter()
@@ -130,3 +132,70 @@ async def get_otel_spans():
         "spans_grouped_by_parent": spans_grouped_by_parent,
         "most_recent_parent": most_recent_parent,
     }
+
+
+# Helper functions for debugging
+def init_verbose_loggers():
+    worker_config = litellm.get_secret("WORKER_CONFIG")
+    if os.path.isfile(worker_config):
+        return
+    # if not, assume it's a JSON string
+    _settings = json.loads(os.getenv("WORKER_CONFIG"))
+    if not isinstance(_settings, dict):
+        return
+
+    debug = _settings.get("debug", None)
+    detailed_debug = _settings.get("detailed_debug", None)
+    if debug is True:  # this needs to be first, so users can see Router init debug logs
+        import logging
+
+        from litellm._logging import (
+            verbose_logger,
+            verbose_proxy_logger,
+            verbose_router_logger,
+        )
+
+        # this must ALWAYS remain logging.INFO, DO NOT MODIFY THIS
+        verbose_logger.setLevel(level=logging.INFO)  # sets package logs to info
+        verbose_router_logger.setLevel(level=logging.INFO)  # set router logs to info
+        verbose_proxy_logger.setLevel(level=logging.INFO)  # set proxy logs to info
+    if detailed_debug is True:
+        import logging
+
+        from litellm._logging import (
+            verbose_logger,
+            verbose_proxy_logger,
+            verbose_router_logger,
+        )
+
+        verbose_logger.setLevel(level=logging.DEBUG)  # set package logs to debug
+        verbose_router_logger.setLevel(level=logging.DEBUG)  # set router logs to debug
+        verbose_proxy_logger.setLevel(level=logging.DEBUG)  # set proxy logs to debug
+    elif debug is False and detailed_debug is False:
+        # users can control proxy debugging using env variable = 'LITELLM_LOG'
+        litellm_log_setting = os.environ.get("LITELLM_LOG", "")
+        if litellm_log_setting is not None:
+            if litellm_log_setting.upper() == "INFO":
+                import logging
+
+                from litellm._logging import verbose_proxy_logger, verbose_router_logger
+
+                # this must ALWAYS remain logging.INFO, DO NOT MODIFY THIS
+
+                verbose_router_logger.setLevel(
+                    level=logging.INFO
+                )  # set router logs to info
+                verbose_proxy_logger.setLevel(
+                    level=logging.INFO
+                )  # set proxy logs to info
+            elif litellm_log_setting.upper() == "DEBUG":
+                import logging
+
+                from litellm._logging import verbose_proxy_logger, verbose_router_logger
+
+                verbose_router_logger.setLevel(
+                    level=logging.DEBUG
+                )  # set router logs to debug
+                verbose_proxy_logger.setLevel(
+                    level=logging.DEBUG
+                )  # set proxy logs to debug
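These two hunks extend the proxy's debugging utilities module (litellm.proxy.common_utils.debug_utils, judging by the import added in the next file) with json/os imports and a new init_verbose_loggers() helper. As a rough usage sketch, not part of the change itself: WORKER_CONFIG may be a JSON string rather than a file path, in which case the helper parses it and applies the debug / detailed_debug flags. The values below are illustrative assumptions.

    # Sketch only: drive the new init_verbose_loggers() path from the environment.
    # Assumes WORKER_CONFIG is picked up from os.environ by litellm.get_secret;
    # the flag values are illustrative, not prescribed by the PR.
    import json
    import os

    os.environ["WORKER_CONFIG"] = json.dumps({"debug": False, "detailed_debug": True})

    from litellm.proxy.common_utils.debug_utils import init_verbose_loggers

    init_verbose_loggers()  # detailed_debug=True sets package/router/proxy loggers to DEBUG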
@@ -141,6 +141,7 @@ from litellm.proxy.common_utils.admin_ui_utils import (
     setup_admin_ui_on_server_root_path,
     show_missing_vars_in_env,
 )
+from litellm.proxy.common_utils.debug_utils import init_verbose_loggers
 from litellm.proxy.common_utils.debug_utils import router as debugging_endpoints_router
 from litellm.proxy.common_utils.encrypt_decrypt_utils import (
     decrypt_value_helper,
@@ -2547,6 +2548,8 @@ async def startup_event():
     global prisma_client, master_key, use_background_health_checks, llm_router, llm_model_list, general_settings, proxy_budget_rescheduler_min_time, proxy_budget_rescheduler_max_time, litellm_proxy_admin_name, db_writer_client, store_model_in_db, premium_user, _license_check
     import json
 
+    init_verbose_loggers()
+
     ### LOAD MASTER KEY ###
     # check if master key set in environment - load from there
     master_key = litellm.get_secret("LITELLM_MASTER_KEY", None)
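These two hunks touch the proxy server: the helper is imported and called at the top of startup_event(), before the master key is loaded, so log verbosity is settled from the environment before anything else runs. When both flags are explicitly false in WORKER_CONFIG, the LITELLM_LOG environment variable takes over; a sketch of that fallback path, under the same assumptions as above:

    # Sketch only: LITELLM_LOG fallback, taken when both WORKER_CONFIG flags
    # are explicitly false. Values are illustrative assumptions.
    import json
    import os

    os.environ["WORKER_CONFIG"] = json.dumps({"debug": False, "detailed_debug": False})
    os.environ["LITELLM_LOG"] = "DEBUG"  # "INFO" is the other value the branch handles

    from litellm.proxy.common_utils.debug_utils import init_verbose_loggers

    init_verbose_loggers()  # router and proxy loggers switch to DEBUG via the elif branch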
@@ -815,7 +815,7 @@
     spend_log_transactions: List = []
 
     def __init__(self, database_url: str, proxy_logging_obj: ProxyLogging):
-        print_verbose(
+        verbose_proxy_logger.debug(
             "LiteLLM: DATABASE_URL Set in config, trying to 'pip install prisma'"
         )
         ## init logging object
@@ -844,8 +844,9 @@
             os.chdir(original_dir)
         # Now you can import the Prisma Client
         from prisma import Prisma  # type: ignore
-
+        verbose_proxy_logger.debug("Connecting Prisma Client to DB..")
         self.db = Prisma()  # Client to connect to Prisma db
+        verbose_proxy_logger.debug("Success - Connected Prisma Client to DB")
 
     def hash_token(self, token: str):
         # Hash the string using SHA-256
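These two hunks are in the Prisma client class used by the proxy: the __init__ message moves from print_verbose to verbose_proxy_logger.debug, and two debug statements now bracket the Prisma connection. Being DEBUG-level, they only appear when verbose_proxy_logger is verbose enough; a minimal sketch of surfacing them (the handler wiring is an assumption, any standard logging setup would do):

    # Sketch only: make the new Prisma connection debug lines visible.
    # verbose_proxy_logger is the logger used in the diff above; attaching a
    # StreamHandler is an assumption for environments where none is configured.
    import logging

    from litellm._logging import verbose_proxy_logger

    verbose_proxy_logger.setLevel(logging.DEBUG)
    if not verbose_proxy_logger.handlers:
        verbose_proxy_logger.addHandler(logging.StreamHandler())

    # During proxy startup, constructing the Prisma client should now log:
    #   "Connecting Prisma Client to DB.."
    #   "Success - Connected Prisma Client to DB"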