track custom_llm_provider in SpendLogs

This commit is contained in:
Ishaan Jaff 2024-11-19 10:27:24 -08:00
parent 7550aba474
commit 244152fa82
4 changed files with 4 additions and 0 deletions

View file

@@ -1773,6 +1773,7 @@ class SpendLogsPayload(TypedDict):
model_id: Optional[str]
model_group: Optional[str]
api_base: str
custom_llm_provider: Optional[str]
user: str
metadata: str # json str
cache_hit: str

View file

@@ -192,6 +192,7 @@ model LiteLLM_SpendLogs {
model_id String? @default("") // the model id stored in proxy model db
model_group String? @default("") // public model_name / model_group
api_base String? @default("")
custom_llm_provider String? @default("") // openai, vertex_ai etc
user String? @default("")
metadata Json? @default("{}")
cache_hit String? @default("")

View file

@@ -150,6 +150,7 @@ def get_logging_payload(
request_tags=request_tags,
end_user=end_user_id or "",
api_base=litellm_params.get("api_base", ""),
custom_llm_provider=litellm_params.get("custom_llm_provider", None),
model_group=_model_group,
model_id=_model_id,
requester_ip_address=clean_metadata.get("requester_ip_address", None),

View file

@@ -192,6 +192,7 @@ model LiteLLM_SpendLogs {
model_id String? @default("") // the model id stored in proxy model db
model_group String? @default("") // public model_name / model_group
api_base String? @default("")
custom_llm_provider String? @default("") // openai, vertex_ai etc
user String? @default("")
metadata Json? @default("{}")
cache_hit String? @default("")