mirror of
https://github.com/BerriAI/litellm.git
synced 2025-04-27 03:34:10 +00:00
fix(proxy_server.py): put load_config in a try-except block
This commit is contained in:
parent
704aaf2adc
commit
7af0223d72
1 changed files with 57 additions and 54 deletions
|
@ -182,6 +182,7 @@ def save_params_to_config(data: dict):
|
|||
|
||||
|
||||
def load_config():
|
||||
try:
|
||||
global user_config, user_api_base, user_max_tokens, user_temperature, user_model
|
||||
# As the .env file is typically much simpler in structure, we use load_dotenv here directly
|
||||
with open(user_config_path, "rb") as f:
|
||||
|
@ -243,6 +244,8 @@ def load_config():
|
|||
},
|
||||
final_prompt_value=model_prompt_template.get("MODEL_POST_PROMPT", ""),
|
||||
)
|
||||
except:
|
||||
pass
|
||||
|
||||
|
||||
def initialize(model, alias, api_base, debug, temperature, max_tokens, max_budget, telemetry, drop_params,
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue