forked from phoenix/litellm-mirror
fix(proxy_server.py): reject bad /model/new POST requests
commit 40c974999e (parent cc29860785)

2 changed files with 18 additions and 6 deletions
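In effect, /model/new now fails closed: the handler snapshots the config before mutating it, an invalid model reverts the file and returns a 400, and only genuinely unexpected errors surface as a 500. A sketch of what a caller sees, assuming a locally running proxy; the payload fields mirror the ModelParams usage visible in the diff:

    import requests

    # assumed local proxy address; adjust to your deployment
    resp = requests.post(
        "http://localhost:8000/model/new",
        json={
            "model_name": "my-gpt-4",
            "litellm_params": {"model": "gpt-4"},  # a bogus value here should now yield 400, not 500
            "model_info": {},
        },
    )
    print(resp.status_code, resp.json())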
@@ -1888,7 +1888,7 @@ async def add_new_model(model_params: ModelParams):
                 config = yaml.safe_load(config_file)
         else:
             config = {"model_list": []}
-
+        backup_config = copy.deepcopy(config)
         print_verbose(f"Loaded config: {config}")
         # Add the new model to the config
         model_info = model_params.model_info.json()
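The deepcopy above matters: config nests a mutable model_list, so a plain assignment would alias the same objects and the "backup" would silently track every later mutation. A minimal illustration:

    import copy

    config = {"model_list": [{"model_name": "gpt-4"}]}
    alias = config                  # same underlying dict
    backup = copy.deepcopy(config)  # independent snapshot

    config["model_list"].append({"model_name": "bad-model"})
    print(len(alias["model_list"]))   # 2 -- the alias sees the mutation
    print(len(backup["model_list"]))  # 1 -- the snapshot is unaffected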
@@ -1908,16 +1908,27 @@ async def add_new_model(model_params: ModelParams):
             yaml.dump(config, config_file, default_flow_style=False)

         # update Router
-        llm_router, llm_model_list, general_settings = load_router_config(
-            router=llm_router, config_file_path=user_config_file_path
-        )
+        try:
+            llm_router, llm_model_list, general_settings = load_router_config(
+                router=llm_router, config_file_path=user_config_file_path
+            )
+        except Exception as e:
+            # Revert to old config instead
+            with open(f"{user_config_file_path}", "w") as config_file:
+                yaml.dump(backup_config, config_file, default_flow_style=False)
+            raise HTTPException(status_code=400, detail="Invalid Model passed in")

         print_verbose(f"llm_model_list: {llm_model_list}")
         return {"message": "Model added successfully"}

     except Exception as e:
         traceback.print_exc()
-        raise HTTPException(status_code=500, detail=f"Internal Server Error: {str(e)}")
+        if isinstance(e, HTTPException):
+            raise e
+        else:
+            raise HTTPException(
+                status_code=500, detail=f"Internal Server Error: {str(e)}"
+            )


 #### [BETA] - This is a beta endpoint, format might change based on user feedback https://github.com/BerriAI/litellm/issues/933. If you need a stable endpoint use /model/info
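Note how the two handlers interact: the inner except restores the backup and raises HTTPException(400), and the outer handler's new isinstance check re-raises it untouched; without that check, the blanket except Exception would mask the deliberate 400 as a 500. A minimal, self-contained FastAPI sketch of the same shape (endpoint body hypothetical):

    from fastapi import FastAPI, HTTPException

    app = FastAPI()

    @app.post("/model/new")
    async def add_new_model(payload: dict):
        try:
            # stand-in for the config write + router reload in the real handler
            if "model_name" not in payload:
                raise HTTPException(status_code=400, detail="Invalid Model passed in")
            return {"message": "Model added successfully"}
        except Exception as e:
            # Re-raise HTTPExceptions as-is so a deliberate 400 is not rewrapped
            if isinstance(e, HTTPException):
                raise e
            raise HTTPException(status_code=500, detail=f"Internal Server Error: {str(e)}")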
@@ -127,6 +127,7 @@ def admin_page(is_admin="NOT_GIVEN"):
                 "mode": mode_selected,
             },
         }
+        print(f"model_info: {model_info}")
         # Make the POST request to the specified URL
         complete_url = ""
         if st.session_state["proxy_url"].endswith("/"):
@@ -143,7 +144,7 @@ def admin_page(is_admin="NOT_GIVEN"):
             st.success("Model added successfully!")
         else:
             st.error(
-                f"Failed to add model. Status code: {response.status_code}"
+                f"Failed to add model. Status code: {response.status_code}; Error Message: {response.json()['detail']}"
             )

         st.success("Form submitted successfully!")
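The UI half relies on FastAPI serializing an HTTPException as a JSON body of the form {"detail": ...}, which is why response.json()['detail'] now carries the server's message to the admin page. A small client-side sketch, assuming a local proxy URL:

    import requests

    # assumed local proxy address; an empty payload should exercise the error path
    response = requests.post("http://localhost:8000/model/new", json={})
    if response.status_code == 200:
        print("Model added successfully!")
    else:
        # FastAPI renders HTTPException(detail=...) as {"detail": "..."}
        print(
            f"Failed to add model. Status code: {response.status_code}; "
            f"Error Message: {response.json()['detail']}"
        )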