Mirror of https://github.com/BerriAI/litellm.git
Synced 2025-04-26 19:24:27 +00:00
fix(proxy_server.py): adding coverage for v1/models
This commit is contained in:

parent 4960b00055
commit 836f615b4f

1 changed file with 1 addition and 0 deletions
@@ -408,6 +408,7 @@ litellm.failure_callback = [logger]
 
 
 #### API ENDPOINTS ####
+@router.post("/v1/models")
 @router.get("/models") # if project requires model list
 def model_list():
     if user_model != None:
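For context, a minimal sketch of how the two route decorators stack on the same handler after this change, so that the OpenAI-style /v1/models path reaches model_list() in addition to the existing /models route. Only the two decorator lines come from the diff; the router setup, the user_model global, and the handler body below are illustrative assumptions, not the project's actual implementation.

from fastapi import APIRouter

router = APIRouter()

# Assumed module-level state; in proxy_server.py this would be set from the CLI/config.
user_model = None

@router.post("/v1/models")  # route added by this commit
@router.get("/models")  # if project requires model list
def model_list():
    # Illustrative body: return a single user-supplied model if one is set,
    # otherwise an empty OpenAI-style model list.
    all_models = [user_model] if user_model is not None else []
    return {
        "object": "list",
        "data": [{"id": m, "object": "model"} for m in all_models],
    }

With the proxy running, the new route could be exercised with something like curl -X POST http://<proxy-host>:<port>/v1/models (host and port depend on how the proxy was started).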