From 69452f003d63758e1590d658bf61423802377f63 Mon Sep 17 00:00:00 2001 From: Ishaan Jaff Date: Sat, 11 May 2024 11:21:53 -0700 Subject: [PATCH] ui - show token hashes on ui --- ...odel_prices_and_context_window_backup.json | 142 ++++++++++++++++-- .../src/components/view_key_spend_report.tsx | 141 ----------------- .../src/components/view_key_table.tsx | 27 ++-- 3 files changed, 142 insertions(+), 168 deletions(-) delete mode 100644 ui/litellm-dashboard/src/components/view_key_spend_report.tsx diff --git a/litellm/model_prices_and_context_window_backup.json b/litellm/model_prices_and_context_window_backup.json index 10c70a858..1ade08fe3 100644 --- a/litellm/model_prices_and_context_window_backup.json +++ b/litellm/model_prices_and_context_window_backup.json @@ -1571,6 +1571,135 @@ "litellm_provider": "replicate", "mode": "chat" }, + "openrouter/microsoft/wizardlm-2-8x22b:nitro": { + "max_tokens": 65536, + "input_cost_per_token": 0.000001, + "output_cost_per_token": 0.000001, + "litellm_provider": "openrouter", + "mode": "chat" + }, + "openrouter/google/gemini-pro-1.5": { + "max_tokens": 8192, + "max_input_tokens": 1000000, + "max_output_tokens": 8192, + "input_cost_per_token": 0.0000025, + "output_cost_per_token": 0.0000075, + "input_cost_per_image": 0.00265, + "litellm_provider": "openrouter", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true + }, + "openrouter/mistralai/mixtral-8x22b-instruct": { + "max_tokens": 65536, + "input_cost_per_token": 0.00000065, + "output_cost_per_token": 0.00000065, + "litellm_provider": "openrouter", + "mode": "chat" + }, + "openrouter/cohere/command-r-plus": { + "max_tokens": 128000, + "input_cost_per_token": 0.000003, + "output_cost_per_token": 0.000015, + "litellm_provider": "openrouter", + "mode": "chat" + }, + "openrouter/databricks/dbrx-instruct": { + "max_tokens": 32768, + "input_cost_per_token": 0.0000006, + "output_cost_per_token": 0.0000006, + "litellm_provider": "openrouter", + "mode": "chat" + }, + "openrouter/anthropic/claude-3-haiku": { + "max_tokens": 200000, + "input_cost_per_token": 0.00000025, + "output_cost_per_token": 0.00000125, + "input_cost_per_image": 0.0004, + "litellm_provider": "openrouter", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true + }, + "openrouter/anthropic/claude-3-sonnet": { + "max_tokens": 200000, + "input_cost_per_token": 0.000003, + "output_cost_per_token": 0.000015, + "input_cost_per_image": 0.0048, + "litellm_provider": "openrouter", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true + }, + "openrouter/mistralai/mistral-large": { + "max_tokens": 32000, + "input_cost_per_token": 0.000008, + "output_cost_per_token": 0.000024, + "litellm_provider": "openrouter", + "mode": "chat" + }, + "openrouter/cognitivecomputations/dolphin-mixtral-8x7b": { + "max_tokens": 32769, + "input_cost_per_token": 0.0000005, + "output_cost_per_token": 0.0000005, + "litellm_provider": "openrouter", + "mode": "chat" + }, + "openrouter/google/gemini-pro-vision": { + "max_tokens": 45875, + "input_cost_per_token": 0.000000125, + "output_cost_per_token": 0.000000375, + "input_cost_per_image": 0.0025, + "litellm_provider": "openrouter", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true + }, + "openrouter/fireworks/firellava-13b": { + "max_tokens": 4096, + "input_cost_per_token": 0.0000002, + "output_cost_per_token": 0.0000002, + "litellm_provider": "openrouter", + "mode": "chat" + }, + 
"openrouter/meta-llama/llama-3-8b-instruct:free": { + "max_tokens": 8192, + "input_cost_per_token": 0.0, + "output_cost_per_token": 0.0, + "litellm_provider": "openrouter", + "mode": "chat" + }, + "openrouter/meta-llama/llama-3-8b-instruct:extended": { + "max_tokens": 16384, + "input_cost_per_token": 0.000000225, + "output_cost_per_token": 0.00000225, + "litellm_provider": "openrouter", + "mode": "chat" + }, + "openrouter/meta-llama/llama-3-70b-instruct:nitro": { + "max_tokens": 8192, + "input_cost_per_token": 0.0000009, + "output_cost_per_token": 0.0000009, + "litellm_provider": "openrouter", + "mode": "chat" + }, + "openrouter/meta-llama/llama-3-70b-instruct": { + "max_tokens": 8192, + "input_cost_per_token": 0.00000059, + "output_cost_per_token": 0.00000079, + "litellm_provider": "openrouter", + "mode": "chat" + }, + "openrouter/openai/gpt-4-vision-preview": { + "max_tokens": 130000, + "input_cost_per_token": 0.00001, + "output_cost_per_token": 0.00003, + "input_cost_per_image": 0.01445, + "litellm_provider": "openrouter", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true + }, "openrouter/openai/gpt-3.5-turbo": { "max_tokens": 4095, "input_cost_per_token": 0.0000015, @@ -1621,14 +1750,14 @@ "tool_use_system_prompt_tokens": 395 }, "openrouter/google/palm-2-chat-bison": { - "max_tokens": 8000, + "max_tokens": 25804, "input_cost_per_token": 0.0000005, "output_cost_per_token": 0.0000005, "litellm_provider": "openrouter", "mode": "chat" }, "openrouter/google/palm-2-codechat-bison": { - "max_tokens": 8000, + "max_tokens": 20070, "input_cost_per_token": 0.0000005, "output_cost_per_token": 0.0000005, "litellm_provider": "openrouter", @@ -1711,13 +1840,6 @@ "litellm_provider": "openrouter", "mode": "chat" }, - "openrouter/meta-llama/llama-3-70b-instruct": { - "max_tokens": 8192, - "input_cost_per_token": 0.0000008, - "output_cost_per_token": 0.0000008, - "litellm_provider": "openrouter", - "mode": "chat" - }, "j2-ultra": { "max_tokens": 8192, "max_input_tokens": 8192, @@ -3226,4 +3348,4 @@ "mode": "embedding" } -} +} \ No newline at end of file diff --git a/ui/litellm-dashboard/src/components/view_key_spend_report.tsx b/ui/litellm-dashboard/src/components/view_key_spend_report.tsx deleted file mode 100644 index 6741db2a9..000000000 --- a/ui/litellm-dashboard/src/components/view_key_spend_report.tsx +++ /dev/null @@ -1,141 +0,0 @@ -"use client"; - -import React, { useState, useEffect } from "react"; -import { - Button as Button2, - Modal, - Form, - Input, - InputNumber, - Select, - message, -} from "antd"; -import { - Button, - Text, - Card, - Table, - BarChart, - Title, - Subtitle, - BarList, - Metric, -} from "@tremor/react"; -import { keySpendLogsCall, PredictedSpendLogsCall } from "./networking"; - -interface ViewKeySpendReportProps { - token: string; - accessToken: string; - keySpend: number; - keyBudget: number; - keyName: string; -} - -type ResponseValueType = { - startTime: string; // Assuming startTime is a string, adjust it if it's of a different type - spend: number; // Assuming spend is a number, adjust it if it's of a different type - user: string; // Assuming user is a string, adjust it if it's of a different type -}; - -const ViewKeySpendReport: React.FC = ({ - token, - accessToken, - keySpend, - keyBudget, - keyName, -}) => { - const [isModalVisible, setIsModalVisible] = useState(false); - const [data, setData] = useState<{ day: string; spend: number }[] | null>( - null - ); - const [predictedSpendString, setPredictedSpendString] = useState(""); - 
const [userData, setUserData] = useState< - { name: string; value: number }[] | null - >(null); - - const showModal = () => { - console.log("Show Modal triggered"); - setIsModalVisible(true); - fetchData(); - }; - - const handleOk = () => { - setIsModalVisible(false); - }; - - const handleCancel = () => { - setIsModalVisible(false); - }; - - // call keySpendLogsCall and set the data - const fetchData = async () => { - try { - if (accessToken == null || token == null) { - return; - } - console.log(`accessToken: ${accessToken}; token: ${token}`); - const response = await keySpendLogsCall( - (accessToken = accessToken), - (token = token) - ); - console.log("Response:", response); - setData(response); - - // predict spend based on response - const predictedSpend = await PredictedSpendLogsCall(accessToken, response); - console.log("Response2:", predictedSpend); - - // append predictedSpend to data - const combinedData = [...response, ...predictedSpend.response]; - setData(combinedData); - setPredictedSpendString(predictedSpend.predicted_spend) - - console.log("Combined Data:", combinedData); - // setPredictedSpend(predictedSpend); - - } catch (error) { - console.error("There was an error fetching the data", error); - } - }; - - - if (!token) { - return null; - } - - return ( -
- - - Key Name: {keyName} - - Monthly Spend ${keySpend} - {predictedSpendString} - - - {data && ( - - )} - - - -
- ); -}; - -export default ViewKeySpendReport; diff --git a/ui/litellm-dashboard/src/components/view_key_table.tsx b/ui/litellm-dashboard/src/components/view_key_table.tsx index d812c19a3..bb6ffc420 100644 --- a/ui/litellm-dashboard/src/components/view_key_table.tsx +++ b/ui/litellm-dashboard/src/components/view_key_table.tsx @@ -17,6 +17,7 @@ import { DialogPanel, Text, Title, + Subtitle, Icon, BarChart, } from "@tremor/react"; @@ -32,8 +33,6 @@ import { Select, } from "antd"; -import ViewKeySpendReport from "./view_key_spend_report"; - const { Option } = Select; @@ -570,6 +569,7 @@ const ViewKeyTable: React.FC = ({ icon={InformationCircleIcon} size="sm" /> + = ({ setOpenDialogId(null); setSelectedItem(null); }} + static={true} > @@ -639,22 +640,14 @@ const ViewKeyTable: React.FC = ({ - - {spendData && ( - - )} - - + + Token Name + {selectedItem.key_alias ? selectedItem.key_alias : selectedItem.key_name} + Token ID + {selectedItem.token} Metadata - - {JSON.stringify(selectedItem.metadata)} +
{JSON.stringify(selectedItem.metadata)} 
+
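
A quick note on how the new pricing entries above are consumed: each object prices a model per token, so a request's cost works out to input_tokens * input_cost_per_token + output_tokens * output_cost_per_token, plus input_cost_per_image per image for the vision-capable entries. The TypeScript sketch below simply restates that arithmetic using the openrouter/anthropic/claude-3-haiku values added in this patch; the ModelPrice interface and estimateCost helper are illustrative names for this note only, not part of litellm or of the dashboard code touched here.

// Minimal sketch of how a per-token pricing entry translates into a request cost.
// Illustrative only: litellm's real cost logic lives in the Python library, and the
// type/function names below are assumptions made for this example.
interface ModelPrice {
  input_cost_per_token: number;
  output_cost_per_token: number;
  input_cost_per_image?: number;
}

// Values copied from the "openrouter/anthropic/claude-3-haiku" entry added above.
const claude3Haiku: ModelPrice = {
  input_cost_per_token: 0.00000025,
  output_cost_per_token: 0.00000125,
  input_cost_per_image: 0.0004,
};

function estimateCost(
  price: ModelPrice,
  inputTokens: number,
  outputTokens: number,
  images: number = 0
): number {
  return (
    inputTokens * price.input_cost_per_token +
    outputTokens * price.output_cost_per_token +
    images * (price.input_cost_per_image ?? 0)
  );
}

// 10k prompt tokens + 1k completion tokens + 1 image ≈ $0.00415
console.log(estimateCost(claude3Haiku, 10_000, 1_000, 1));

On the UI side, per the commit title, the new Token ID field added to the key-info dialog renders selectedItem.token, i.e. the key's stored token hash rather than the raw API key, alongside the key's alias or name.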