Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-25 10:44:24 +00:00.
OpenAI /v1/realtime API support (#6047)
* feat(azure/realtime): initial working commit for proxy azure openai realtime endpoint support — adds support for passing /v1/realtime calls via litellm proxy
* feat(realtime_api/main.py): abstraction for handling openai realtime api calls
* feat(router.py): add `arealtime()` endpoint in router for realtime api calls — allows using `model_list` in proxy for realtime as well
* fix: make realtime api a private function — structure might change based on feedback; make that clear to users
* build(requirements.txt): add websockets to the requirements.txt
* feat(openai/realtime): add openai /v1/realtime api support
This commit is contained in:
parent
130842537f
commit
f9d0bcc5a1
11 changed files with 350 additions and 7 deletions
|
@ -58,6 +58,7 @@ async def route_request(
|
|||
"atranscription",
|
||||
"amoderation",
|
||||
"arerank",
|
||||
"_arealtime", # private function for realtime API
|
||||
],
|
||||
):
|
||||
"""
|
||||
|
@ -65,7 +66,6 @@ async def route_request(
|
|||
|
||||
"""
|
||||
router_model_names = llm_router.model_names if llm_router is not None else []
|
||||
|
||||
if "api_key" in data or "api_base" in data:
|
||||
return getattr(litellm, f"{route_type}")(**data)
|
||||
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue