ui - show token hashes on ui

Ishaan Jaff 2024-05-11 11:21:53 -07:00
parent d33e49411d
commit 69452f003d
3 changed files with 142 additions and 168 deletions

View file

@@ -1571,6 +1571,135 @@
"litellm_provider": "replicate",
"mode": "chat"
},
"openrouter/microsoft/wizardlm-2-8x22b:nitro": {
"max_tokens": 65536,
"input_cost_per_token": 0.000001,
"output_cost_per_token": 0.000001,
"litellm_provider": "openrouter",
"mode": "chat"
},
"openrouter/google/gemini-pro-1.5": {
"max_tokens": 8192,
"max_input_tokens": 1000000,
"max_output_tokens": 8192,
"input_cost_per_token": 0.0000025,
"output_cost_per_token": 0.0000075,
"input_cost_per_image": 0.00265,
"litellm_provider": "openrouter",
"mode": "chat",
"supports_function_calling": true,
"supports_vision": true
},
"openrouter/mistralai/mixtral-8x22b-instruct": {
"max_tokens": 65536,
"input_cost_per_token": 0.00000065,
"output_cost_per_token": 0.00000065,
"litellm_provider": "openrouter",
"mode": "chat"
},
"openrouter/cohere/command-r-plus": {
"max_tokens": 128000,
"input_cost_per_token": 0.000003,
"output_cost_per_token": 0.000015,
"litellm_provider": "openrouter",
"mode": "chat"
},
"openrouter/databricks/dbrx-instruct": {
"max_tokens": 32768,
"input_cost_per_token": 0.0000006,
"output_cost_per_token": 0.0000006,
"litellm_provider": "openrouter",
"mode": "chat"
},
"openrouter/anthropic/claude-3-haiku": {
"max_tokens": 200000,
"input_cost_per_token": 0.00000025,
"output_cost_per_token": 0.00000125,
"input_cost_per_image": 0.0004,
"litellm_provider": "openrouter",
"mode": "chat",
"supports_function_calling": true,
"supports_vision": true
},
"openrouter/anthropic/claude-3-sonnet": {
"max_tokens": 200000,
"input_cost_per_token": 0.000003,
"output_cost_per_token": 0.000015,
"input_cost_per_image": 0.0048,
"litellm_provider": "openrouter",
"mode": "chat",
"supports_function_calling": true,
"supports_vision": true
},
"openrouter/mistralai/mistral-large": {
"max_tokens": 32000,
"input_cost_per_token": 0.000008,
"output_cost_per_token": 0.000024,
"litellm_provider": "openrouter",
"mode": "chat"
},
"openrouter/cognitivecomputations/dolphin-mixtral-8x7b": {
"max_tokens": 32769,
"input_cost_per_token": 0.0000005,
"output_cost_per_token": 0.0000005,
"litellm_provider": "openrouter",
"mode": "chat"
},
"openrouter/google/gemini-pro-vision": {
"max_tokens": 45875,
"input_cost_per_token": 0.000000125,
"output_cost_per_token": 0.000000375,
"input_cost_per_image": 0.0025,
"litellm_provider": "openrouter",
"mode": "chat",
"supports_function_calling": true,
"supports_vision": true
},
"openrouter/fireworks/firellava-13b": {
"max_tokens": 4096,
"input_cost_per_token": 0.0000002,
"output_cost_per_token": 0.0000002,
"litellm_provider": "openrouter",
"mode": "chat"
},
"openrouter/meta-llama/llama-3-8b-instruct:free": {
"max_tokens": 8192,
"input_cost_per_token": 0.0,
"output_cost_per_token": 0.0,
"litellm_provider": "openrouter",
"mode": "chat"
},
"openrouter/meta-llama/llama-3-8b-instruct:extended": {
"max_tokens": 16384,
"input_cost_per_token": 0.000000225,
"output_cost_per_token": 0.00000225,
"litellm_provider": "openrouter",
"mode": "chat"
},
"openrouter/meta-llama/llama-3-70b-instruct:nitro": {
"max_tokens": 8192,
"input_cost_per_token": 0.0000009,
"output_cost_per_token": 0.0000009,
"litellm_provider": "openrouter",
"mode": "chat"
},
"openrouter/meta-llama/llama-3-70b-instruct": {
"max_tokens": 8192,
"input_cost_per_token": 0.00000059,
"output_cost_per_token": 0.00000079,
"litellm_provider": "openrouter",
"mode": "chat"
},
"openrouter/openai/gpt-4-vision-preview": {
"max_tokens": 130000,
"input_cost_per_token": 0.00001,
"output_cost_per_token": 0.00003,
"input_cost_per_image": 0.01445,
"litellm_provider": "openrouter",
"mode": "chat",
"supports_function_calling": true,
"supports_vision": true
},
"openrouter/openai/gpt-3.5-turbo": {
"max_tokens": 4095,
"input_cost_per_token": 0.0000015,
@@ -1621,14 +1750,14 @@
"tool_use_system_prompt_tokens": 395
},
"openrouter/google/palm-2-chat-bison": {
"max_tokens": 8000,
"max_tokens": 25804,
"input_cost_per_token": 0.0000005,
"output_cost_per_token": 0.0000005,
"litellm_provider": "openrouter",
"mode": "chat"
},
"openrouter/google/palm-2-codechat-bison": {
"max_tokens": 8000,
"max_tokens": 20070,
"input_cost_per_token": 0.0000005,
"output_cost_per_token": 0.0000005,
"litellm_provider": "openrouter",
@@ -1711,13 +1840,6 @@
"litellm_provider": "openrouter",
"mode": "chat"
},
"openrouter/meta-llama/llama-3-70b-instruct": {
"max_tokens": 8192,
"input_cost_per_token": 0.0000008,
"output_cost_per_token": 0.0000008,
"litellm_provider": "openrouter",
"mode": "chat"
},
"j2-ultra": {
"max_tokens": 8192,
"max_input_tokens": 8192,
@@ -3226,4 +3348,4 @@
"mode": "embedding"
}
}
}
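
As an aside for readers of the pricing entries above: the *_cost_per_token fields are USD rates per single token, and input_cost_per_image is a flat per-image charge. Below is a minimal TypeScript sketch of how such an entry is typically turned into a request cost estimate; the helper name, formula, and token counts are illustrative assumptions, not code from this commit.

// Hypothetical helper; mirrors the usual "tokens x per-token rate" reading
// of the pricing-map entries added in this hunk.
type ModelPrice = {
  input_cost_per_token: number;
  output_cost_per_token: number;
  input_cost_per_image?: number;
};

function estimateCostUSD(
  price: ModelPrice,
  promptTokens: number,
  completionTokens: number,
  images = 0
): number {
  return (
    promptTokens * price.input_cost_per_token +
    completionTokens * price.output_cost_per_token +
    images * (price.input_cost_per_image ?? 0)
  );
}

// e.g. openrouter/google/gemini-pro-1.5 with 1,000 prompt and 500 completion tokens:
// 1000 * 0.0000025 + 500 * 0.0000075 = 0.00625 USD
const exampleCost = estimateCostUSD(
  { input_cost_per_token: 0.0000025, output_cost_per_token: 0.0000075 },
  1000,
  500
);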

View file

@@ -1,141 +0,0 @@
"use client";
import React, { useState, useEffect } from "react";
import {
Button as Button2,
Modal,
Form,
Input,
InputNumber,
Select,
message,
} from "antd";
import {
Button,
Text,
Card,
Table,
BarChart,
Title,
Subtitle,
BarList,
Metric,
} from "@tremor/react";
import { keySpendLogsCall, PredictedSpendLogsCall } from "./networking";
interface ViewKeySpendReportProps {
token: string;
accessToken: string;
keySpend: number;
keyBudget: number;
keyName: string;
}
type ResponseValueType = {
startTime: string; // Assuming startTime is a string, adjust it if it's of a different type
spend: number; // Assuming spend is a number, adjust it if it's of a different type
user: string; // Assuming user is a string, adjust it if it's of a different type
};
const ViewKeySpendReport: React.FC<ViewKeySpendReportProps> = ({
token,
accessToken,
keySpend,
keyBudget,
keyName,
}) => {
const [isModalVisible, setIsModalVisible] = useState(false);
const [data, setData] = useState<{ day: string; spend: number }[] | null>(
null
);
const [predictedSpendString, setPredictedSpendString] = useState("");
const [userData, setUserData] = useState<
{ name: string; value: number }[] | null
>(null);
const showModal = () => {
console.log("Show Modal triggered");
setIsModalVisible(true);
fetchData();
};
const handleOk = () => {
setIsModalVisible(false);
};
const handleCancel = () => {
setIsModalVisible(false);
};
// call keySpendLogsCall and set the data
const fetchData = async () => {
try {
if (accessToken == null || token == null) {
return;
}
console.log(`accessToken: ${accessToken}; token: ${token}`);
const response = await keySpendLogsCall(
(accessToken = accessToken),
(token = token)
);
console.log("Response:", response);
setData(response);
// predict spend based on response
const predictedSpend = await PredictedSpendLogsCall(accessToken, response);
console.log("Response2:", predictedSpend);
// append predictedSpend to data
const combinedData = [...response, ...predictedSpend.response];
setData(combinedData);
setPredictedSpendString(predictedSpend.predicted_spend)
console.log("Combined Data:", combinedData);
// setPredictedSpend(predictedSpend);
} catch (error) {
console.error("There was an error fetching the data", error);
}
};
if (!token) {
return null;
}
return (
<div>
<Button size = "xs" onClick={showModal} variant="secondary">
Spend Report
</Button>
<Modal
visible={isModalVisible}
width={1400}
onOk={handleOk}
onCancel={handleCancel}
footer={null}
>
<Title style={{ textAlign: "left" }}>Key Name: {keyName}</Title>
<Metric>Monthly Spend ${keySpend}</Metric>
<Title>{predictedSpendString}</Title>
<Card className="mt-6 mb-6">
{data && (
<BarChart
className="mt-6"
data={data}
colors={["blue", "amber"]}
index="date"
categories={["spend", "predicted_spend"]}
yAxisWidth={80}
/>
)}
</Card>
</Modal>
</div>
);
};
export default ViewKeySpendReport;
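
One note on the deleted fetchData above: keySpendLogsCall((accessToken = accessToken), (token = token)) is not named-argument syntax. In TypeScript each parenthesised assignment just reassigns the parameter to itself and evaluates to that value, so the call behaves like an ordinary positional call. A plainer equivalent, shown only as a sketch of the same call:

// Equivalent positional form of the call made by the deleted component:
const response = await keySpendLogsCall(accessToken, token);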

View file

@@ -17,6 +17,7 @@ import {
DialogPanel,
Text,
Title,
Subtitle,
Icon,
BarChart,
} from "@tremor/react";
@@ -32,8 +33,6 @@ import {
Select,
} from "antd";
- import ViewKeySpendReport from "./view_key_spend_report";
const { Option } = Select;
@@ -570,6 +569,7 @@ const ViewKeyTable: React.FC<ViewKeyTableProps> = ({
icon={InformationCircleIcon}
size="sm"
/>
<Dialog
open={openDialogId !== null}
@@ -577,6 +577,7 @@ const ViewKeyTable: React.FC<ViewKeyTableProps> = ({
setOpenDialogId(null);
setSelectedItem(null);
}}
static={true}
>
<DialogPanel>
@@ -639,22 +640,14 @@ const ViewKeyTable: React.FC<ViewKeyTableProps> = ({
</Card>
</div>
<Card className="mt-6 mb-6">
{spendData && (
<BarChart
className="mt-6"
data={spendData}
colors={["blue", "amber"]}
index="date"
categories={["spend", "predicted_spend"]}
yAxisWidth={80}
/>
)}
</Card>
<Card className="my-4">
<Title>Token Name</Title>
<Text className="my-1">{selectedItem.key_alias ? selectedItem.key_alias : selectedItem.key_name}</Text>
<Title>Token ID</Title>
<Text className="my-1">{selectedItem.token}</Text>
<Title>Metadata</Title>
- <Text>{JSON.stringify(selectedItem.metadata)}</Text>
+ <Text className="my-1"><pre>{JSON.stringify(selectedItem.metadata)} </pre></Text>
</Card>
<Button