(fix) proxy debugging display Init API key

ishaan-jaff 2023-12-05 16:08:12 -08:00
parent b4c78c7b9e
commit d2dab362df
2 changed files with 3 additions and 3 deletions


@@ -609,6 +609,8 @@ def initialize(
     generate_feedback_box()
     user_model = model
     user_debug = debug
+    if debug==True:  # this needs to be first, so users can see Router init debugg
+        litellm.set_verbose = True
     dynamic_config = {"general": {}, user_model: {}}
     if config:
         llm_router, llm_model_list, general_settings = load_router_config(router=llm_router, config_file_path=config)
@@ -646,8 +648,6 @@ def initialize(
     if max_budget:  # litellm-specific param
         litellm.max_budget = max_budget
         dynamic_config["general"]["max_budget"] = max_budget
-    if debug==True:  # litellm-specific param
-        litellm.set_verbose = True
     if use_queue:
         celery_setup(use_queue=use_queue)
     if experimental:
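
The first change moves the debug check above load_router_config(), so verbosity is on before the Router is constructed. Below is a minimal, self-contained sketch of the ordering issue, assuming (as the diff suggests) that the Router only prints its init-time debug output when verbosity is already enabled; every name in the sketch is a hypothetical stand-in, not the proxy's actual internals.

# Illustrative sketch only: VERBOSE stands in for litellm.set_verbose,
# load_config_sketch() for load_router_config().
class _SketchRouter:
    def __init__(self, verbose: bool):
        self.verbose = verbose
        self.print_verbose("Router initialized")  # init-time debug output

    def print_verbose(self, msg: str):
        if self.verbose:
            print(msg)

VERBOSE = False  # stands in for litellm.set_verbose

def load_config_sketch():
    # The Router is constructed here, so whether its debug output
    # appears depends on VERBOSE at this exact moment.
    return _SketchRouter(verbose=VERBOSE)

def initialize_sketch(debug: bool = False):
    global VERBOSE
    if debug:          # set verbosity first, mirroring the commit's reordering
        VERBOSE = True
    return load_config_sketch()

initialize_sketch(debug=True)  # now prints "Router initialized"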


@@ -959,7 +959,7 @@ class Router:
                 )
             else:
-                self.print_verbose(f"Initializing OpenAI Client for {model_name}, {str(api_base)}")
+                self.print_verbose(f"Initializing OpenAI Client for {model_name}, {str(api_base)}, {api_key}")
                 model["async_client"] = openai.AsyncOpenAI(
                     api_key=api_key,
                     base_url=api_base,
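
The second change adds the api_key to the Router's debug message when it builds the async OpenAI client. A hedged sketch of the pattern, assuming Router.print_verbose simply gates on a verbose flag (the class and method names below are stand-ins; only openai.AsyncOpenAI and the message format come from the diff):

import openai

class _RouterSketch:
    def __init__(self, set_verbose: bool = False):
        self.set_verbose = set_verbose

    def print_verbose(self, msg: str):
        # Assumed behaviour: print only when verbose mode is on.
        if self.set_verbose:
            print(msg)

    def init_async_openai_client(self, model_name: str, api_key: str, api_base: str):
        # Mirrors the changed line: the api_key is now part of the debug
        # message, so you can see which key each async client was built with.
        self.print_verbose(
            f"Initializing OpenAI Client for {model_name}, {str(api_base)}, {api_key}"
        )
        return openai.AsyncOpenAI(api_key=api_key, base_url=api_base)

Since this echoes the raw key into the proxy's debug output, it is presumably intended for local debugging rather than production logging.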