forked from phoenix/litellm-mirror
(fix) proxy debugging display Init API key
This commit is contained in:
parent
b4c78c7b9e
commit
d2dab362df
2 changed files with 3 additions and 3 deletions
|
@@ -609,6 +609,8 @@ def initialize(
|
|||
generate_feedback_box()
|
||||
user_model = model
|
||||
user_debug = debug
|
||||
if debug==True: # this needs to be first, so users can see Router init debugging
|
||||
litellm.set_verbose = True
|
||||
dynamic_config = {"general": {}, user_model: {}}
|
||||
if config:
|
||||
llm_router, llm_model_list, general_settings = load_router_config(router=llm_router, config_file_path=config)
|
||||
|
@@ -646,8 +648,6 @@ def initialize(
|
|||
if max_budget: # litellm-specific param
|
||||
litellm.max_budget = max_budget
|
||||
dynamic_config["general"]["max_budget"] = max_budget
|
||||
if debug==True: # litellm-specific param
|
||||
litellm.set_verbose = True
|
||||
if use_queue:
|
||||
celery_setup(use_queue=use_queue)
|
||||
if experimental:
|
||||
|
|
|
@@ -959,7 +959,7 @@ class Router:
|
|||
)
|
||||
|
||||
else:
|
||||
self.print_verbose(f"Initializing OpenAI Client for {model_name}, {str(api_base)}")
|
||||
self.print_verbose(f"Initializing OpenAI Client for {model_name}, {str(api_base)}, {api_key}")
|
||||
model["async_client"] = openai.AsyncOpenAI(
|
||||
api_key=api_key,
|
||||
base_url=api_base,
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue