(feat) proxy - define model info

This commit is contained in:
ishaan-jaff 2023-12-06 12:06:28 -08:00
parent 102de97960
commit 29fb97f88a
2 changed files with 8 additions and 2 deletions

View file

@@ -10,6 +10,8 @@ model_list:
input_cost_per_token: 0.00006 input_cost_per_token: 0.00006
output_cost_per_token: 0.00003 output_cost_per_token: 0.00003
max_tokens: 4096 max_tokens: 4096
base_model: gpt-35-turbo
- model_name: openai-gpt-3.5 - model_name: openai-gpt-3.5
litellm_params: litellm_params:
model: gpt-3.5-turbo model: gpt-3.5-turbo

View file

@@ -1,5 +1,5 @@
from pydantic import BaseModel, Extra from pydantic import BaseModel, Extra
from typing import Optional, List, Union, Dict from typing import Optional, List, Union, Dict, Literal
from datetime import datetime from datetime import datetime
import uuid import uuid
######### Request Class Definition ###### ######### Request Class Definition ######
@@ -40,7 +40,11 @@ class ProxyChatCompletionRequest(BaseModel):
class ModelInfo(BaseModel): class ModelInfo(BaseModel):
id: Optional[str] id: Optional[str]
mode: Optional[str] mode: Optional[Literal['embedding', 'chat', 'completion']]
input_cost_per_token: Optional[float]
output_cost_per_token: Optional[float]
max_tokens: Optional[int]
base_model: Optional[Literal['gpt-4-1106-preview', 'gpt-4-32k', 'gpt-4', 'gpt-3.5-turbo-16k', 'gpt-3.5-turbo']]
class Config: class Config:
extra = Extra.allow # Allow extra fields extra = Extra.allow # Allow extra fields