diff --git a/docs/my-website/docs/proxy/metrics.md b/docs/my-website/docs/proxy/metrics.md
index d113ff6fb..5b839dbd4 100644
--- a/docs/my-website/docs/proxy/metrics.md
+++ b/docs/my-website/docs/proxy/metrics.md
@@ -3,29 +3,34 @@
 ## Response format
 ```json
-[
-    {
-        "daily_spend": 7.9261938052047e+16,
-        "day": "2024-02-01T00:00:00",
-        "spend_per_model": {"azure/gpt-4": 7.9261938052047e+16},
-        "spend_per_api_key": {
-            "76": 914495704992000.0,
-            "12": 905726697912000.0,
-            "71": 866312628003000.0,
-            "28": 865461799332000.0,
-            "13": 859151538396000.0
-        }
-    },
-    {
-        "daily_spend": 7.938489251309491e+16,
-        "day": "2024-02-02T00:00:00",
-        "spend_per_model": {"gpt-3.5": 7.938489251309491e+16},
-        "spend_per_api_key": {
-            "91": 896805036036000.0,
-            "78": 889692646082000.0,
-            "49": 885386687861000.0,
-            "28": 873869890984000.0,
-            "56": 867398637692000.0
-        }
-    }
-]
+{
+    "daily_spend": [
+        {
+            "daily_spend": 7.9261938052047e+16,
+            "day": "2024-02-01T00:00:00",
+            "spend_per_model": {"azure/gpt-4": 7.9261938052047e+16},
+            "spend_per_api_key": {
+                "76": 914495704992000.0,
+                "12": 905726697912000.0,
+                "71": 866312628003000.0,
+                "28": 865461799332000.0,
+                "13": 859151538396000.0
+            }
+        },
+        {
+            "daily_spend": 7.938489251309491e+16,
+            "day": "2024-02-02T00:00:00",
+            "spend_per_model": {"gpt-3.5": 7.938489251309491e+16},
+            "spend_per_api_key": {
+                "91": 896805036036000.0,
+                "78": 889692646082000.0,
+                "49": 885386687861000.0,
+                "28": 873869890984000.0,
+                "56": 867398637692000.0
+            }
+        }
+    ],
+    "total_spend": 200,
+    "top_models": {"gpt4": 0.2, "vertexai/gemini-pro": 10},
+    "top_api_keys": {"899922": 0.9, "838hcjd999seerr88": 20}
+}
 ```
\ No newline at end of file
diff --git a/litellm/integrations/clickhouse.py b/litellm/integrations/clickhouse.py
index 700423664..55eaaa090 100644
--- a/litellm/integrations/clickhouse.py
+++ b/litellm/integrations/clickhouse.py
@@ -62,10 +62,9 @@ def build_daily_metrics():
     )
 
     # get daily spend per model
-
     daily_spend_per_model = click_house_client.query_df(
         """
         SELECT
            sumMerge(DailySpend) as daily_spend, day, model
        FROM daily_aggregated_spend_per_model
        GROUP BY day, model
@@ -82,6 +81,7 @@ def build_daily_metrics():
 
     # Display the resulting dictionary
 
+    # get daily spend per API key
     daily_spend_per_api_key = click_house_client.query_df(
         """
         SELECT
@@ -122,6 +122,23 @@ def build_daily_metrics():
 
     # Display the resulting dictionary
 
+    # Calculate total spend across all days
+    total_spend = daily_spend["daily_spend"].sum()
+
+    # Identify top models and top API keys with the highest spend across all days
+    top_models = {}
+    top_api_keys = {}
+
+    for day, spend_per_model in result_dict.items():
+        for model, model_spend in spend_per_model.items():
+            if model not in top_models or model_spend > top_models[model]:
+                top_models[model] = model_spend
+
+    for day, spend_per_api_key in api_key_result_dict.items():
+        for api_key, api_key_spend in spend_per_api_key.items():
+            if api_key not in top_api_keys or api_key_spend > top_api_keys[api_key]:
+                top_api_keys[api_key] = api_key_spend
+
     # for each day in daily spend, look up the day in result_dict and api_key_result_dict
     # Assuming daily_spend DataFrame has 'day' column
     result = []
@@ -143,8 +160,16 @@ def build_daily_metrics():
 
         result.append(data_day)
 
-    # print("FINAL daily metric", result)
-    return result
+    data_to_return = {}
+    data_to_return["daily_spend"] = result
+
+    data_to_return["total_spend"] = total_spend
+    data_to_return["top_models"] = top_models
+    data_to_return["top_api_keys"] = top_api_keys
+    return data_to_return
+
+
+# build_daily_metrics()
 
 
 def _create_clickhouse_material_views(client=None, table_names=[]):