mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-26 11:14:04 +00:00)
fix(proxy_server.py): add more detailed debugging
This commit is contained in:
parent 2a69650e8c
commit e38ff06d01
1 changed file with 7 additions and 2 deletions
proxy_server.py

@@ -2390,11 +2390,12 @@ class ProxyConfig:
             raise Exception(
                 f"Master key is not initialized or formatted. master_key={master_key}"
             )
+        verbose_proxy_logger.debug(f"llm_router: {llm_router}")
         if llm_router is None:
             new_models = (
                 await prisma_client.db.litellm_proxymodeltable.find_many()
             )  # get all models in db
+            verbose_proxy_logger.debug(f"new_models: {new_models}")
             _model_list: list = []
             for m in new_models:
                 _litellm_params = m.litellm_params
@@ -2429,8 +2430,12 @@ class ProxyConfig:
                         model_info=_model_info,
                     ).to_json(exclude_none=True)
                 )
+            verbose_proxy_logger.debug(f"_model_list: {_model_list}")
             llm_router = litellm.Router(model_list=_model_list)
+            verbose_proxy_logger.debug(f"updated llm_router: {llm_router}")
+            verbose_proxy_logger.debug(
+                f"llm_router model list: {llm_router.model_list}"
+            )
         else:
             new_models = await prisma_client.db.litellm_proxymodeltable.find_many(
                 take=10, order={"updated_at": "desc"}
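For anyone tracing the change: the new verbose_proxy_logger.debug calls bracket the step where ProxyConfig rebuilds the proxy's litellm.Router from models stored in the database. Below is a minimal sketch of that code path, assuming a standalone helper; the function name _rebuild_router_from_db, the dict shape appended to _model_list, and the bare prisma_client parameter are illustrative assumptions, not the exact proxy_server.py implementation.

import litellm
from litellm._logging import verbose_proxy_logger


async def _rebuild_router_from_db(prisma_client, llm_router):
    # Sketch only: mirrors the diff's control flow, with the per-model
    # conversion simplified to a plain dict (the real code builds a richer
    # deployment object before appending).
    verbose_proxy_logger.debug(f"llm_router: {llm_router}")
    if llm_router is None:
        new_models = (
            await prisma_client.db.litellm_proxymodeltable.find_many()
        )  # get all models in db
        verbose_proxy_logger.debug(f"new_models: {new_models}")
        _model_list: list = []
        for m in new_models:
            _litellm_params = m.litellm_params
            # assumed row shape: model_name / litellm_params / model_info columns
            _model_list.append(
                {
                    "model_name": m.model_name,
                    "litellm_params": _litellm_params,
                    "model_info": m.model_info,
                }
            )
        verbose_proxy_logger.debug(f"_model_list: {_model_list}")
        llm_router = litellm.Router(model_list=_model_list)
        verbose_proxy_logger.debug(f"updated llm_router: {llm_router}")
        verbose_proxy_logger.debug(
            f"llm_router model list: {llm_router.model_list}"
        )
    else:
        # a Router already exists: only fetch the 10 most recently updated rows
        new_models = await prisma_client.db.litellm_proxymodeltable.find_many(
            take=10, order={"updated_at": "desc"}
        )
    return llm_router, new_models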
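To actually see these messages, debug logging has to be enabled on the proxy. verbose_proxy_logger is a standard Python logging logger, so one way is the sketch below (the proxy CLI also exposes debug flags such as --detailed_debug; check the docs for your version).

import logging

from litellm._logging import verbose_proxy_logger

# Route DEBUG-level records to the console and lower the proxy logger's
# threshold so the newly added debug messages show up.
logging.basicConfig(level=logging.DEBUG)
verbose_proxy_logger.setLevel(logging.DEBUG)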