forked from phoenix/litellm-mirror
(docs) proxy: add tags=chat/completions + add response type
This commit is contained in:
parent
2977d04f56
commit
e77b7e5a50
1 changed file with 4 additions and 4 deletions
|
@@ -709,10 +709,10 @@ async def completion(request: Request, model: Optional[str] = None, user_api_key
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@router.post("/v1/chat/completions", dependencies=[Depends(user_api_key_auth)])
|
@router.post("/v1/chat/completions", dependencies=[Depends(user_api_key_auth)], tags=["chat/completions"])
|
||||||
@router.post("/chat/completions", dependencies=[Depends(user_api_key_auth)])
|
@router.post("/chat/completions", dependencies=[Depends(user_api_key_auth)], tags=["chat/completions"])
|
||||||
@router.post("/openai/deployments/{model:path}/chat/completions", dependencies=[Depends(user_api_key_auth)]) # azure compatible endpoint
|
@router.post("/openai/deployments/{model:path}/chat/completions", dependencies=[Depends(user_api_key_auth)], tags=["chat/completions"]) # azure compatible endpoint
|
||||||
async def chat_completion(request: Request, model: Optional[str] = None, user_api_key_dict: dict = Depends(user_api_key_auth)):
|
async def chat_completion(request: Request, model: Optional[str] = None, user_api_key_dict: dict = Depends(user_api_key_auth)) -> litellm.ModelResponse:
|
||||||
global general_settings, user_debug
|
global general_settings, user_debug
|
||||||
try:
|
try:
|
||||||
data = {}
|
data = {}
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue