Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-25 02:34:29 +00:00)
(feat) Track custom_llm_provider in LiteLLMSpendLogs (#7081)
* add custom_llm_provider to SpendLogsPayload
* add custom_llm_provider to SpendLogs
* add custom llm provider to SpendLogs payload
* test_spend_logs_payload
This commit is contained in:
parent 0e39cb58d8
commit d4c52e2921
5 changed files with 7 additions and 0 deletions
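In short, the change threads one extra field from the request kwargs into the spend-log row, so each logged request records which provider LiteLLM routed it to. A minimal sketch of the idea (the helper name build_spend_row is illustrative only, not part of the PR):

# Rough sketch of the flow this commit adds, using a hypothetical helper name.
# The provider LiteLLM resolved for a request is read from the call kwargs and
# stored next to the rest of the spend-log fields.
from typing import Any, Dict

def build_spend_row(kwargs: Dict[str, Any]) -> Dict[str, Any]:
    return {
        "model": kwargs.get("model", ""),
        # default to "" when no provider is known, matching the schema default
        "custom_llm_provider": kwargs.get("custom_llm_provider", ""),
    }

row = build_spend_row({"model": "azure/gpt-4o", "custom_llm_provider": "azure"})
assert row["custom_llm_provider"] == "azure"
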
@@ -1812,6 +1812,7 @@ class SpendLogsPayload(TypedDict):
     team_id: Optional[str]
     end_user: Optional[str]
     requester_ip_address: Optional[str]
+    custom_llm_provider: Optional[str]


 class SpanAttributes(str, enum.Enum):

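The new field is Optional[str], so readers of the payload should expect None as well as an empty string. A small stand-in example follows; _SpendLogsPayloadSketch is a simplified placeholder for litellm's actual SpendLogsPayload and uses total=False only to keep the snippet short:

from typing import Optional, TypedDict

class _SpendLogsPayloadSketch(TypedDict, total=False):
    # simplified stand-in for SpendLogsPayload; only the fields from this hunk are shown
    team_id: Optional[str]
    end_user: Optional[str]
    requester_ip_address: Optional[str]
    custom_llm_provider: Optional[str]

payload: _SpendLogsPayloadSketch = {"custom_llm_provider": "azure"}
# the key is Optional, so guard against None (and missing keys) when reading it
provider = payload.get("custom_llm_provider") or "unknown"
assert provider == "azure"
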
@@ -191,6 +191,7 @@ model LiteLLM_SpendLogs {
   model               String   @default("")
   model_id            String?  @default("") // the model id stored in proxy model db
   model_group         String?  @default("") // public model_name / model_group
+  custom_llm_provider String?  @default("") // litellm used custom_llm_provider
   api_base            String?  @default("")
   user                String?  @default("")
   metadata            Json?    @default("{}")

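With the column in place, spend can be broken down per provider. A rough sketch of such a query, not part of this PR, assuming the Prisma Python client the proxy already uses and the existing spend column on LiteLLM_SpendLogs:

import asyncio
from prisma import Prisma  # prisma-client-py

async def spend_by_provider() -> None:
    db = Prisma()
    await db.connect()
    # group total spend by the new column; empty string means "provider unknown"
    rows = await db.query_raw(
        'SELECT custom_llm_provider, SUM(spend) AS total_spend '
        'FROM "LiteLLM_SpendLogs" GROUP BY custom_llm_provider'
    )
    for row in rows:
        print(row["custom_llm_provider"] or "unknown", row["total_spend"])
    await db.disconnect()

asyncio.run(spend_by_provider())
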
@@ -153,6 +153,7 @@ def get_logging_payload(
        model_group=_model_group,
        model_id=_model_id,
        requester_ip_address=clean_metadata.get("requester_ip_address", None),
+       custom_llm_provider=kwargs.get("custom_llm_provider", ""),
    )

    verbose_proxy_logger.debug(

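The kwargs.get("custom_llm_provider", "") fallback means requests that never carried a provider log an empty string instead of raising, which lines up with the String? @default("") column in the schema above. A trivial illustration:

kwargs_with_provider = {"model": "gpt-4o", "custom_llm_provider": "azure"}
kwargs_without_provider = {"model": "gpt-4o"}

print(kwargs_with_provider.get("custom_llm_provider", ""))     # "azure"
print(kwargs_without_provider.get("custom_llm_provider", ""))  # "" (schema default)
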
@@ -191,6 +191,7 @@ model LiteLLM_SpendLogs {
   model               String   @default("")
   model_id            String?  @default("") // the model id stored in proxy model db
   model_group         String?  @default("") // public model_name / model_group
+  custom_llm_provider String?  @default("") // litellm used custom_llm_provider
   api_base            String?  @default("")
   user                String?  @default("")
   metadata            Json?    @default("{}")

@@ -45,6 +45,7 @@ def test_spend_logs_payload(model_id: Optional[str]):
            {"role": "system", "content": "you are a helpful assistant.\n"},
            {"role": "user", "content": "bom dia"},
        ],
+       "custom_llm_provider": "azure",
        "optional_params": {
            "stream": False,
            "max_tokens": 10,

@@ -211,6 +212,8 @@ def test_spend_logs_payload(model_id: Optional[str]):
        payload["request_tags"] == '["model-anthropic-claude-v2.1", "app-ishaan-prod"]'
    )

+   assert payload["custom_llm_provider"] == "azure"
+

 def test_spend_logs_payload_whisper():
     """