forked from phoenix/litellm-mirror
refactor(proxy_server.py): experimental endpoint - defined job status
parent 4e06b4a26f
commit 331fe86c8f
1 changed file with 12 additions and 5 deletions
proxy_server.py
@@ -17,6 +17,7 @@ try:
     import tomli_w
     import backoff
     import yaml
+    import rq
 except ImportError:
     import sys
 
@@ -32,7 +33,8 @@ except ImportError:
             "appdirs",
             "tomli-w",
             "backoff",
-            "pyyaml"
+            "pyyaml",
+            "rq"
         ]
     )
     import uvicorn
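For context, the endpoint changed below builds on the standard `rq` job lifecycle: a producer enqueues work and gets back a job id, and `Job.fetch` later looks that job up by id on the same Redis connection. A minimal sketch, where `process_request` and the queue are hypothetical stand-ins; this diff only shows the fetch/status side, not the enqueue side:

```python
# Minimal sketch of the rq job lifecycle the status endpoint relies on.
# `process_request` and the queue are hypothetical stand-ins; the diff
# only shows the fetch/status side, not the enqueue side.
from redis import Redis
from rq import Queue
from rq.job import Job

redis_connection = Redis(host="localhost", port=6379)  # assumed Redis location
queue = Queue(connection=redis_connection)

def process_request(data: dict) -> dict:
    # placeholder for the proxy's actual worker function
    return {"result": "done"}

# Producer side: enqueue work and hand the job id back to the caller.
job = queue.enqueue(process_request, {"prompt": "hi"})
task_id = job.id

# Consumer side, mirroring the endpoint below: look the job up by id.
fetched = Job.fetch(task_id, connection=redis_connection)
print(fetched.get_status())  # e.g. queued / started / finished / failed
print(fetched.result)        # None until a worker has finished the job
```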
@@ -593,10 +595,15 @@ async def async_chat_completions(request: Request):
 @router.get("/queue/response/{task_id}", dependencies=[Depends(user_api_key_auth)])
 async def async_chat_completions(request: Request, task_id: str):
     global redis_connection, redis_job
-    job = redis_job.fetch(id=task_id, connection=redis_connection)
-    print(f"job status: {job.get_status()}")
-    result = job.result
-    return {"status": job.get_status(), "result": result}
+    try:
+        job = redis_job.fetch(id=task_id, connection=redis_connection)
+        print(f"job status: {job.get_status()}")
+        result = job.result
+        if result is not None:
+            status = "finished"
+        return {"status": status, "result": result}
+    except Exception as e:
+        return {"status": "finished", "result": str(e)}
 
 
 @router.get("/ollama_logs", dependencies=[Depends(user_api_key_auth)])
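On the client side, the new response shape can be polled until `status` reports `finished`. A rough sketch, assuming the proxy listens on `http://0.0.0.0:8000` and that `sk-1234` is a key `user_api_key_auth` accepts (both assumptions, not part of this diff):

```python
import time
import requests

BASE_URL = "http://0.0.0.0:8000"               # assumed proxy address
HEADERS = {"Authorization": "Bearer sk-1234"}  # assumed API key

def poll_queue_response(task_id: str, interval: float = 1.0, timeout: float = 60.0) -> dict:
    """Poll /queue/response/{task_id} until the job reports 'finished'."""
    deadline = time.time() + timeout
    while time.time() < deadline:
        resp = requests.get(f"{BASE_URL}/queue/response/{task_id}", headers=HEADERS)
        body = resp.json()
        if body.get("status") == "finished":
            return body
        time.sleep(interval)
    raise TimeoutError(f"job {task_id} did not finish within {timeout}s")
```

Note that the `except` branch in the endpoint also answers with `status: "finished"` and the exception text as `result`, so a caller that needs to distinguish success from failure has to inspect `result` itself rather than the status field.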