Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-25 10:44:24 +00:00)
Realtime API: Support 'base_model' cost tracking + show response in spend logs (if enabled) (#9897)
* refactor(litellm_logging.py): refactor realtime cost tracking to use the same common code path as the rest of the library, so basic features like base_model just work
* feat(realtime/): support 'base_model' cost tracking on the Realtime API; fixes an issue where base_model was not applied to realtime calls
* fix: fix ruff linting error
* test: fix test
This commit is contained in:
parent 78879c68a9 / commit 9f27e8363f
6 changed files with 102 additions and 46 deletions
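The headline change is that realtime calls now honor a configured base_model for cost tracking, the same way other endpoints do. The sketch below is a minimal illustration of that lookup rule only, with a hypothetical price table and function name; it is not litellm's actual cost-calculation code. When a base_model is supplied, pricing resolves against it instead of the (possibly arbitrary) deployment name.

from typing import Optional

# Hypothetical per-1K-token prices, for illustration only.
REALTIME_PRICES = {
    "gpt-4o-realtime-preview": {"input": 0.005, "output": 0.020},
}

def estimate_realtime_cost(
    model: str,
    input_tokens: int,
    output_tokens: int,
    base_model: Optional[str] = None,
) -> float:
    """Resolve pricing against base_model when set, else the deployment name."""
    pricing_key = base_model or model
    if pricing_key not in REALTIME_PRICES:
        raise ValueError(f"No pricing entry for '{pricing_key}'")
    prices = REALTIME_PRICES[pricing_key]
    return (
        input_tokens / 1000 * prices["input"]
        + output_tokens / 1000 * prices["output"]
    )

# An Azure realtime deployment with an arbitrary name is costed via its base_model:
# estimate_realtime_cost("my-azure-realtime-deployment", 1000, 500,
#                        base_model="gpt-4o-realtime-preview")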
Diff excerpt (the _arealtime setup path):

@@ -7,6 +7,7 @@ from litellm import get_llm_provider
 from litellm.secret_managers.main import get_secret_str
 from litellm.types.router import GenericLiteLLMParams
 
+from ..litellm_core_utils.get_litellm_params import get_litellm_params
 from ..litellm_core_utils.litellm_logging import Logging as LiteLLMLogging
 from ..llms.azure.realtime.handler import AzureOpenAIRealtime
 from ..llms.openai.realtime.handler import OpenAIRealtime
 
@@ -34,13 +35,11 @@ async def _arealtime(
     For PROXY use only.
     """
     litellm_logging_obj: LiteLLMLogging = kwargs.get("litellm_logging_obj")  # type: ignore
-    litellm_call_id: Optional[str] = kwargs.get("litellm_call_id", None)
-    proxy_server_request = kwargs.get("proxy_server_request", None)
-    model_info = kwargs.get("model_info", None)
-    metadata = kwargs.get("metadata", {})
     user = kwargs.get("user", None)
     litellm_params = GenericLiteLLMParams(**kwargs)
 
+    litellm_params_dict = get_litellm_params(**kwargs)
+
     model, _custom_llm_provider, dynamic_api_key, dynamic_api_base = get_llm_provider(
         model=model,
         api_base=api_base,
@@ -51,14 +50,7 @@ async def _arealtime(
         model=model,
         user=user,
         optional_params={},
-        litellm_params={
-            "litellm_call_id": litellm_call_id,
-            "proxy_server_request": proxy_server_request,
-            "model_info": model_info,
-            "metadata": metadata,
-            "preset_cache_key": None,
-            "stream_response": {},
-        },
+        litellm_params=litellm_params_dict,
         custom_llm_provider=_custom_llm_provider,
     )
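The refactor in the last two hunks swaps the hand-built litellm_params dict for the shared get_litellm_params(**kwargs) helper, so realtime calls gather their logging parameters the same way as other endpoints, which is what lets common features like base_model cost tracking apply without realtime-specific plumbing. Below is a simplified sketch of that consolidation, using a hypothetical build_litellm_params stand-in rather than litellm's real helper.

from typing import Any, Dict

def build_litellm_params(**kwargs: Any) -> Dict[str, Any]:
    # Collect the same fields the old inline dict carried; model_info is where a
    # configured base_model rides along to the logging/cost layer.
    return {
        "litellm_call_id": kwargs.get("litellm_call_id"),
        "proxy_server_request": kwargs.get("proxy_server_request"),
        "model_info": kwargs.get("model_info"),
        "metadata": kwargs.get("metadata", {}),
        "preset_cache_key": None,
        "stream_response": {},
    }

# The logging setup shown in the last hunk then receives the shared result:
#     litellm_params=build_litellm_params(**kwargs)
# instead of an inline dict that every call site has to keep in sync by hand.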