mirror of
https://github.com/BerriAI/litellm.git
synced 2025-04-25 10:44:24 +00:00
OpenAI /v1/realtime
api support (#6047)
* feat(azure/realtime): initial working commit for proxy azure openai realtime endpoint support Adds support for passing /v1/realtime calls via litellm proxy * feat(realtime_api/main.py): abstraction for handling openai realtime api calls * feat(router.py): add `arealtime()` endpoint in router for realtime api calls Allows using `model_list` in proxy for realtime as well * fix: make realtime api a private function Structure might change based on feedback. Make that clear to users. * build(requirements.txt): add websockets to the requirements.txt * feat(openai/realtime): add openai /v1/realtime api support
This commit is contained in:
parent
130842537f
commit
f9d0bcc5a1
11 changed files with 350 additions and 7 deletions
91
litellm/realtime_api/main.py
Normal file
91
litellm/realtime_api/main.py
Normal file
|
@ -0,0 +1,91 @@
|
|||
"""Abstraction function for OpenAI's realtime API"""
|
||||
|
||||
import os
|
||||
from typing import Any, Optional
|
||||
|
||||
import litellm
|
||||
from litellm import get_llm_provider
|
||||
from litellm.secret_managers.main import get_secret_str
|
||||
from litellm.types.router import GenericLiteLLMParams
|
||||
|
||||
from ..llms.AzureOpenAI.realtime.handler import AzureOpenAIRealtime
|
||||
from ..llms.OpenAI.realtime.handler import OpenAIRealtime
|
||||
|
||||
azure_realtime = AzureOpenAIRealtime()
openai_realtime = OpenAIRealtime()


async def _arealtime(
    model: str,
    websocket: Any,  # fastapi websocket
    api_base: Optional[str] = None,
    api_key: Optional[str] = None,
    api_version: Optional[str] = None,
    azure_ad_token: Optional[str] = None,
    client: Optional[Any] = None,
    timeout: Optional[float] = None,
    **kwargs,
):
    """
    Private function to handle the realtime API call.

    For PROXY use only.

    Resolves the provider for `model` and forwards the websocket session to
    the matching realtime handler (Azure OpenAI or OpenAI).

    Args:
        model: Model name, optionally provider-prefixed (e.g. "azure/...").
        websocket: FastAPI websocket for the client connection.
        api_base: Optional override for the provider API base URL.
        api_key: Optional override for the provider API key.
        api_version: Optional Azure API version; defaults to
            "2024-10-01-preview" when not supplied (Azure only).
        azure_ad_token: Optional Azure AD token (Azure only).
        client: Optional pre-configured provider client to reuse.
        timeout: Optional timeout in seconds, forwarded to the handler.
        **kwargs: Additional litellm params, parsed into GenericLiteLLMParams.

    Raises:
        ValueError: If the resolved provider is not "azure" or "openai".
    """
    litellm_params = GenericLiteLLMParams(**kwargs)

    model, _custom_llm_provider, dynamic_api_key, dynamic_api_base = get_llm_provider(
        model=model,
        api_base=api_base,
        api_key=api_key,
    )

    if _custom_llm_provider == "azure":
        api_base = (
            dynamic_api_base
            or litellm_params.api_base
            or litellm.api_base
            or get_secret_str("AZURE_API_BASE")
        )
        # set API KEY
        api_key = (
            dynamic_api_key
            or litellm.api_key
            # BUGFIX: was `litellm.openai_key` (copy-paste from the openai
            # branch) — the Azure branch should fall back to the Azure key.
            or litellm.azure_key
            or get_secret_str("AZURE_API_KEY")
        )

        await azure_realtime.async_realtime(
            model=model,
            websocket=websocket,
            api_base=api_base,
            api_key=api_key,
            # BUGFIX: honor caller-supplied api_version / azure_ad_token /
            # client — previously hard-coded, silently ignoring the
            # function's own parameters. Defaults preserve old behavior.
            api_version=api_version or "2024-10-01-preview",
            azure_ad_token=azure_ad_token,
            client=client,
            timeout=timeout,
        )
    elif _custom_llm_provider == "openai":
        api_base = (
            dynamic_api_base
            or litellm_params.api_base
            or litellm.api_base
            or "https://api.openai.com/"
        )
        # set API KEY
        api_key = (
            dynamic_api_key
            or litellm.api_key
            or litellm.openai_key
            or get_secret_str("OPENAI_API_KEY")
        )

        await openai_realtime.async_realtime(
            model=model,
            websocket=websocket,
            api_base=api_base,
            api_key=api_key,
            # BUGFIX: pass through the caller-supplied client (was always None)
            client=client,
            timeout=timeout,
        )
    else:
        raise ValueError(f"Unsupported model: {model}")
|
Loading…
Add table
Add a link
Reference in a new issue