diff --git a/docs/my-website/docs/proxy/quick_start.md b/docs/my-website/docs/proxy/quick_start.md index f8ff95004..37d8d221a 100644 --- a/docs/my-website/docs/proxy/quick_start.md +++ b/docs/my-website/docs/proxy/quick_start.md @@ -611,16 +611,31 @@ print(result) ## Debugging Proxy -Run the proxy with `--debug` to easily view debug logs + +Events that occur during normal operation ```shell litellm --model gpt-3.5-turbo --debug ``` -When making requests you should see the POST request sent by LiteLLM to the LLM on the Terminal output +Detailed information ```shell -POST Request Sent from LiteLLM: -curl -X POST \ -https://api.openai.com/v1/chat/completions \ --H 'content-type: application/json' -H 'Authorization: Bearer sk-qnWGUIW9****************************************' \ --d '{"model": "gpt-3.5-turbo", "messages": [{"role": "user", "content": "this is a test request, write a short poem"}]}' +litellm --model gpt-3.5-turbo --detailed_debug ``` + +### Set Debug Level using env variables + +Events that occur during normal operation +```shell +export LITELLM_LOG=INFO +``` + +Detailed information +```shell +export LITELLM_LOG=DEBUG +``` + +No Logs +```shell +export LITELLM_LOG=None +``` + diff --git a/litellm/proxy/proxy_server.py b/litellm/proxy/proxy_server.py index eca51926f..6feba0bb5 100644 --- a/litellm/proxy/proxy_server.py +++ b/litellm/proxy/proxy_server.py @@ -976,6 +976,33 @@ async def initialize( verbose_router_logger.setLevel(level=logging.DEBUG) # set router logs to info verbose_proxy_logger.setLevel(level=logging.DEBUG) # set proxy logs to debug litellm.set_verbose = True + elif debug == False and detailed_debug == False: + # users can control proxy debugging using env variable = 'LITELLM_LOG' + litellm_log_setting = os.environ.get("LITELLM_LOG", "") + if litellm_log_setting != None: + if litellm_log_setting.upper() == "INFO": + from litellm._logging import verbose_router_logger, verbose_proxy_logger + import logging + + # this must ALWAYS remain 
logging.INFO, DO NOT MODIFY THIS + + verbose_router_logger.setLevel( + level=logging.INFO + ) # set router logs to info + verbose_proxy_logger.setLevel( + level=logging.INFO + ) # set proxy logs to info + elif litellm_log_setting.upper() == "DEBUG": + from litellm._logging import verbose_router_logger, verbose_proxy_logger + import logging + + verbose_router_logger.setLevel( + level=logging.DEBUG + ) # set router logs to debug + verbose_proxy_logger.setLevel( + level=logging.DEBUG + ) # set proxy logs to debug + litellm.set_verbose = True dynamic_config = {"general": {}, user_model: {}} if config: