(fix) proxy /model/new writing to config

This commit is contained in:
ishaan-jaff 2023-12-06 14:08:22 -08:00
parent 346551da29
commit 0598ab9b63

View file

@@ -1027,12 +1027,15 @@ async def info_key_fn(key: str = fastapi.Query(..., description="Key in the request parameters")):
 async def add_new_model(model_params: ModelParams):
     global llm_router, llm_model_list, general_settings, user_config_file_path
     try:
+        print("User config path: ", user_config_file_path)
         # Load existing config
         if os.path.exists(f"{user_config_file_path}"):
             with open(f"{user_config_file_path}", "r") as config_file:
                 config = yaml.safe_load(config_file)
         else:
             config = {"model_list": []}
+        print("Loaded config: ", config)
         # Add the new model to the config
         config['model_list'].append({
             'model_name': model_params.model_name,
@@ -1045,7 +1048,7 @@ async def add_new_model(model_params: ModelParams):
             yaml.dump(config, config_file, default_flow_style=False)
         # update Router
-        llm_router, llm_model_list, general_settings = load_router_config(router=llm_router, config_file_path=config)
+        llm_router, llm_model_list, general_settings = load_router_config(router=llm_router, config_file_path=user_config_file_path)
         return {"message": "Model added successfully"}