Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-26 19:24:27 +00:00
fix(types/router.py): add custom pricing info to 'model_info'
Fixes https://github.com/BerriAI/litellm/issues/4542
parent b8fa4023ea
commit 8625770010
3 changed files with 18 additions and 3 deletions
@@ -9,9 +9,10 @@ model_list:
   - litellm_params:
       api_base: http://0.0.0.0:8080
       api_key: ''
-      model: openai/my-fake-model
+      model: gpt-4o
       rpm: 800
-    model_name: gpt-3.5-turbo-fake-model
+      input_cost_per_token: 300
+    model_name: gpt-4o
   - model_name: llama3-70b-8192
     litellm_params:
       model: groq/llama3-70b-8192
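For context, the edited config entry corresponds to a Router model_list in Python. This is a minimal sketch written for illustration only (it is not part of the commit); the keys simply mirror the YAML above.

# Illustration only, not part of this commit: the YAML entry above expressed as a
# litellm Router model_list. Keys mirror the YAML keys.
from litellm import Router

router = Router(
    model_list=[
        {
            "model_name": "gpt-4o",
            "litellm_params": {
                "model": "gpt-4o",
                "api_base": "http://0.0.0.0:8080",
                "api_key": "",
                "rpm": 800,
                # custom per-token pricing; this commit mirrors it into model_info
                "input_cost_per_token": 300,
            },
        },
    ]
)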
@@ -67,6 +67,7 @@ from litellm.types.llms.openai import (
     Thread,
 )
 from litellm.types.router import (
+    SPECIAL_MODEL_INFO_PARAMS,
     AlertingConfig,
     AllowedFailsPolicy,
     AssistantsTypedDict,
@@ -3794,7 +3795,7 @@ class Router:
         deployment = Deployment(
             **model,
             model_name=_model_name,
-            litellm_params=_litellm_params,  # type: ignore
+            litellm_params=LiteLLM_Params(**_litellm_params),
             model_info=_model_info,
         )
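The router.py change above stops passing the raw params dict straight into Deployment and instead validates it through the LiteLLM_Params pydantic model first, so custom pricing fields become typed attributes. A rough sketch of that step, assuming input_cost_per_token is a declared LiteLLM_Params field (which the new loop in types/router.py relies on):

# Sketch only: validate a raw params dict through LiteLLM_Params.
from litellm.types.router import LiteLLM_Params

raw = {"model": "gpt-4o", "rpm": 800, "input_cost_per_token": 300}
lp = LiteLLM_Params(**raw)      # pydantic validation of the raw dict
print(lp.input_cost_per_token)  # typed attribute rather than a dict lookup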
@@ -324,6 +324,9 @@ class DeploymentTypedDict(TypedDict):
     litellm_params: LiteLLMParamsTypedDict
 
 
+SPECIAL_MODEL_INFO_PARAMS = ["input_cost_per_token", "output_cost_per_token"]
+
+
 class Deployment(BaseModel):
     model_name: str
     litellm_params: LiteLLM_Params
@@ -342,6 +345,16 @@ class Deployment(BaseModel):
             model_info = ModelInfo()
         elif isinstance(model_info, dict):
             model_info = ModelInfo(**model_info)
+
+        for (
+            key
+        ) in (
+            SPECIAL_MODEL_INFO_PARAMS
+        ):  # ensures custom pricing info is consistently in 'model_info'
+            field = getattr(litellm_params, key, None)
+            if field is not None:
+                setattr(model_info, key, field)
+
         super().__init__(
             model_info=model_info,
             model_name=model_name,
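Taken together, the new loop copies any custom pricing set on litellm_params into model_info when a Deployment is constructed, which is what the linked issue asks for. A small end-to-end sketch of the expected behaviour (hedged; the exact ModelInfo field handling may differ):

# Sketch only: custom pricing set on litellm_params should also appear on model_info.
from litellm.types.router import Deployment, LiteLLM_Params

dep = Deployment(
    model_name="gpt-4o",
    litellm_params=LiteLLM_Params(model="gpt-4o", input_cost_per_token=300),
)

# After this commit, cost-tracking code that only reads model_info still sees the
# custom per-token price.
print(dep.model_info.input_cost_per_token)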