Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-25 18:54:30 +00:00
fix(proxy_server.py): add support for passing in a config file via worker_config directly + testing
This commit is contained in:
parent fa74831d79
commit 8f8f961941

2 changed files with 61 additions and 1 deletion
@@ -507,6 +507,14 @@ class ProxyConfig:
     def __init__(self) -> None:
         pass
 
+    def is_yaml(self, config_file_path: str) -> bool:
+        if not os.path.isfile(config_file_path):
+            return False
+
+        _, file_extension = os.path.splitext(config_file_path)
+        return file_extension.lower() == '.yaml' or file_extension.lower() == '.yml'
+
+
     async def get_config(self, config_file_path: Optional[str] = None) -> dict:
         global prisma_client, user_config_file_path
 
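The new is_yaml helper only returns True for a file that exists on disk and carries a .yaml or .yml extension. A minimal sketch of its behavior (the paths below are hypothetical):

    proxy_config = ProxyConfig()
    proxy_config.is_yaml(config_file_path="/app/config.yaml")  # True, if the file exists on disk
    proxy_config.is_yaml(config_file_path="/app/config.json")  # False: wrong extension
    proxy_config.is_yaml(config_file_path="missing.yml")       # False: os.path.isfile() fails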
@@ -1156,7 +1164,14 @@ async def startup_event():
     verbose_proxy_logger.debug(f"worker_config: {worker_config}")
     # check if it's a valid file path
     if os.path.isfile(worker_config):
-        await initialize(**worker_config)
+        if proxy_config.is_yaml(config_file_path=worker_config):
+            (
+                llm_router,
+                llm_model_list,
+                general_settings,
+            ) = await proxy_config.load_config(router=llm_router, config_file_path=worker_config)
+        else:
+            await initialize(**worker_config)
     else:
         # if not, assume it's a json string
         worker_config = json.loads(os.getenv("WORKER_CONFIG"))
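Net effect of the startup_event change: WORKER_CONFIG may now be a path to a yaml config file, loaded via proxy_config.load_config(), while any non-path value still falls back to being parsed as a JSON string of initialize() keyword arguments. A sketch of both invocation styles, assuming the proxy reads WORKER_CONFIG from the environment (the path and kwargs below are illustrative, not from this commit):

    import json
    import os

    # New path: point WORKER_CONFIG at a yaml config file; startup_event()
    # detects the extension via proxy_config.is_yaml() and loads the router,
    # model list, and general settings through proxy_config.load_config().
    os.environ["WORKER_CONFIG"] = "/app/proxy_config.yaml"

    # Pre-existing path: a JSON string of kwargs, which startup_event()
    # json.loads() and passes through to initialize(**worker_config).
    os.environ["WORKER_CONFIG"] = json.dumps({"model": "gpt-3.5-turbo"})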