diff --git a/litellm/model_prices_and_context_window_backup.json b/litellm/model_prices_and_context_window_backup.json
index 10c70a858..1ade08fe3 100644
--- a/litellm/model_prices_and_context_window_backup.json
+++ b/litellm/model_prices_and_context_window_backup.json
@@ -1571,6 +1571,135 @@
         "litellm_provider": "replicate",
         "mode": "chat"
     },
+    "openrouter/microsoft/wizardlm-2-8x22b:nitro": {
+        "max_tokens": 65536,
+        "input_cost_per_token": 0.000001,
+        "output_cost_per_token": 0.000001,
+        "litellm_provider": "openrouter",
+        "mode": "chat"
+    },
+    "openrouter/google/gemini-pro-1.5": {
+        "max_tokens": 8192,
+        "max_input_tokens": 1000000,
+        "max_output_tokens": 8192,
+        "input_cost_per_token": 0.0000025,
+        "output_cost_per_token": 0.0000075,
+        "input_cost_per_image": 0.00265,
+        "litellm_provider": "openrouter",
+        "mode": "chat",
+        "supports_function_calling": true,
+        "supports_vision": true
+    },
+    "openrouter/mistralai/mixtral-8x22b-instruct": {
+        "max_tokens": 65536,
+        "input_cost_per_token": 0.00000065,
+        "output_cost_per_token": 0.00000065,
+        "litellm_provider": "openrouter",
+        "mode": "chat"
+    },
+    "openrouter/cohere/command-r-plus": {
+        "max_tokens": 128000,
+        "input_cost_per_token": 0.000003,
+        "output_cost_per_token": 0.000015,
+        "litellm_provider": "openrouter",
+        "mode": "chat"
+    },
+    "openrouter/databricks/dbrx-instruct": {
+        "max_tokens": 32768,
+        "input_cost_per_token": 0.0000006,
+        "output_cost_per_token": 0.0000006,
+        "litellm_provider": "openrouter",
+        "mode": "chat"
+    },
+    "openrouter/anthropic/claude-3-haiku": {
+        "max_tokens": 200000,
+        "input_cost_per_token": 0.00000025,
+        "output_cost_per_token": 0.00000125,
+        "input_cost_per_image": 0.0004,
+        "litellm_provider": "openrouter",
+        "mode": "chat",
+        "supports_function_calling": true,
+        "supports_vision": true
+    },
+    "openrouter/anthropic/claude-3-sonnet": {
+        "max_tokens": 200000,
+        "input_cost_per_token": 0.000003,
+        "output_cost_per_token": 0.000015,
+        "input_cost_per_image": 0.0048,
+        "litellm_provider": "openrouter",
+        "mode": "chat",
+        "supports_function_calling": true,
+        "supports_vision": true
+    },
+    "openrouter/mistralai/mistral-large": {
+        "max_tokens": 32000,
+        "input_cost_per_token": 0.000008,
+        "output_cost_per_token": 0.000024,
+        "litellm_provider": "openrouter",
+        "mode": "chat"
+    },
+    "openrouter/cognitivecomputations/dolphin-mixtral-8x7b": {
+        "max_tokens": 32769,
+        "input_cost_per_token": 0.0000005,
+        "output_cost_per_token": 0.0000005,
+        "litellm_provider": "openrouter",
+        "mode": "chat"
+    },
+    "openrouter/google/gemini-pro-vision": {
+        "max_tokens": 45875,
+        "input_cost_per_token": 0.000000125,
+        "output_cost_per_token": 0.000000375,
+        "input_cost_per_image": 0.0025,
+        "litellm_provider": "openrouter",
+        "mode": "chat",
+        "supports_function_calling": true,
+        "supports_vision": true
+    },
+    "openrouter/fireworks/firellava-13b": {
+        "max_tokens": 4096,
+        "input_cost_per_token": 0.0000002,
+        "output_cost_per_token": 0.0000002,
+        "litellm_provider": "openrouter",
+        "mode": "chat"
+    },
+    "openrouter/meta-llama/llama-3-8b-instruct:free": {
+        "max_tokens": 8192,
+        "input_cost_per_token": 0.0,
+        "output_cost_per_token": 0.0,
+        "litellm_provider": "openrouter",
+        "mode": "chat"
+    },
+    "openrouter/meta-llama/llama-3-8b-instruct:extended": {
+        "max_tokens": 16384,
+        "input_cost_per_token": 0.000000225,
+        "output_cost_per_token": 0.00000225,
+        "litellm_provider": "openrouter",
+        "mode": "chat"
+    },
+    "openrouter/meta-llama/llama-3-70b-instruct:nitro": {
+        "max_tokens": 8192,
+        "input_cost_per_token": 0.0000009,
+        "output_cost_per_token": 0.0000009,
+        "litellm_provider": "openrouter",
+        "mode": "chat"
+    },
+    "openrouter/meta-llama/llama-3-70b-instruct": {
+        "max_tokens": 8192,
+        "input_cost_per_token": 0.00000059,
+        "output_cost_per_token": 0.00000079,
+        "litellm_provider": "openrouter",
+        "mode": "chat"
+    },
+    "openrouter/openai/gpt-4-vision-preview": {
+        "max_tokens": 130000,
+        "input_cost_per_token": 0.00001,
+        "output_cost_per_token": 0.00003,
+        "input_cost_per_image": 0.01445,
+        "litellm_provider": "openrouter",
+        "mode": "chat",
+        "supports_function_calling": true,
+        "supports_vision": true
+    },
     "openrouter/openai/gpt-3.5-turbo": {
         "max_tokens": 4095,
         "input_cost_per_token": 0.0000015,
@@ -1621,14 +1750,14 @@
         "tool_use_system_prompt_tokens": 395
     },
     "openrouter/google/palm-2-chat-bison": {
-        "max_tokens": 8000,
+        "max_tokens": 25804,
         "input_cost_per_token": 0.0000005,
         "output_cost_per_token": 0.0000005,
         "litellm_provider": "openrouter",
         "mode": "chat"
     },
     "openrouter/google/palm-2-codechat-bison": {
-        "max_tokens": 8000,
+        "max_tokens": 20070,
         "input_cost_per_token": 0.0000005,
         "output_cost_per_token": 0.0000005,
         "litellm_provider": "openrouter",
@@ -1711,13 +1840,6 @@
         "litellm_provider": "openrouter",
         "mode": "chat"
     },
-    "openrouter/meta-llama/llama-3-70b-instruct": {
-        "max_tokens": 8192,
-        "input_cost_per_token": 0.0000008,
-        "output_cost_per_token": 0.0000008,
-        "litellm_provider": "openrouter",
-        "mode": "chat"
-    },
     "j2-ultra": {
         "max_tokens": 8192,
         "max_input_tokens": 8192,
@@ -3226,4 +3348,4 @@
         "mode": "embedding"
     }
-}
+}
\ No newline at end of file
diff --git a/ui/litellm-dashboard/src/components/model_dashboard.tsx b/ui/litellm-dashboard/src/components/model_dashboard.tsx
index 63ee9b41b..e11619165 100644
--- a/ui/litellm-dashboard/src/components/model_dashboard.tsx
+++ b/ui/litellm-dashboard/src/components/model_dashboard.tsx
@@ -152,7 +152,7 @@ const handleSubmit = async (formValues: Record<string, any>, accessToken: string
         litellmExtraParams = JSON.parse(value);
       } catch (error) {
-        message.error("Failed to parse LiteLLM Extra Params: " + error, 20);
+        message.error("Failed to parse LiteLLM Extra Params: " + error, 10);
         throw new Error("Failed to parse litellm_extra_params: " + error);
       }
       for (const [key, value] of Object.entries(litellmExtraParams)) {
@@ -188,7 +188,7 @@ const handleSubmit = async (formValues: Record<string, any>, accessToken: string
   } catch (error) {
-    message.error("Failed to create model: " + error, 20);
+    message.error("Failed to create model: " + error, 10);
   }
 }
diff --git a/ui/litellm-dashboard/src/components/networking.tsx b/ui/litellm-dashboard/src/components/networking.tsx
index e1375e7d4..4a629ed47 100644
--- a/ui/litellm-dashboard/src/components/networking.tsx
+++ b/ui/litellm-dashboard/src/components/networking.tsx
@@ -43,7 +43,7 @@ export const modelCreateCall = async (
     if (!response.ok) {
       const errorData = await response.text();
-      message.error("Failed to create key: " + errorData, 20);
+      message.error("Failed to create key: " + errorData, 10);
       console.error("Error response from the server:", errorData);
       throw new Error("Network response was not ok");
     }
@@ -78,7 +78,7 @@ export const modelDeleteCall = async (
     if (!response.ok) {
       const errorData = await response.text();
-      message.error("Failed to create key: " + errorData, 20);
+      message.error("Failed to create key: " + errorData, 10);
       console.error("Error response from the server:", errorData);
       throw new Error("Network response was not ok");
     }
@@ -120,7 +120,7 @@ export const keyCreateCall = async (
     try {
       formValues.metadata = JSON.parse(formValues.metadata);
     } catch (error) {
-      message.error("Failed to parse metadata: " + error, 20);
+      message.error("Failed to parse metadata: " + error, 10);
       throw new Error("Failed to parse metadata: " + error);
     }
   }
@@ -141,7 +141,7 @@ export const keyCreateCall = async (
     if (!response.ok) {
       const errorData = await response.text();
-      message.error("Failed to create key: " + errorData, 20);
+      message.error("Failed to create key: " + errorData, 10);
       console.error("Error response from the server:", errorData);
       throw new Error("Network response was not ok");
     }
@@ -183,7 +183,7 @@ export const userCreateCall = async (
     try {
       formValues.metadata = JSON.parse(formValues.metadata);
     } catch (error) {
-      message.error("Failed to parse metadata: " + error, 20);
+      message.error("Failed to parse metadata: " + error, 10);
       throw new Error("Failed to parse metadata: " + error);
     }
   }
@@ -204,7 +204,7 @@ export const userCreateCall = async (
     if (!response.ok) {
       const errorData = await response.text();
-      message.error("Failed to create key: " + errorData, 20);
+      message.error("Failed to create key: " + errorData, 10);
       console.error("Error response from the server:", errorData);
       throw new Error("Network response was not ok");
     }
@@ -237,7 +237,7 @@ export const keyDeleteCall = async (accessToken: String, user_key: String) => {
     if (!response.ok) {
       const errorData = await response.text();
-      message.error("Failed to delete key: " + errorData, 20);
+      message.error("Failed to delete key: " + errorData, 10);
       throw new Error("Network response was not ok");
     }
@@ -269,7 +269,7 @@ export const teamDeleteCall = async (accessToken: String, teamID: String) => {
     if (!response.ok) {
       const errorData = await response.text();
-      message.error("Failed to delete team: " + errorData, 20);
+      message.error("Failed to delete team: " + errorData, 10);
       throw new Error("Network response was not ok");
     }
     const data = await response.json();
@@ -314,7 +314,7 @@ export const userInfoCall = async (
     if (!response.ok) {
       const errorData = await response.text();
-      message.error(errorData, 20);
+      message.error(errorData, 10);
       throw new Error("Network response was not ok");
     }
@@ -350,7 +350,7 @@ export const teamInfoCall = async (
     if (!response.ok) {
       const errorData = await response.text();
-      message.error(errorData, 20);
+      message.error(errorData, 10);
       throw new Error("Network response was not ok");
     }
@@ -385,7 +385,7 @@ export const getTotalSpendCall = async (
     if (!response.ok) {
       const errorData = await response.text();
-      message.error(errorData, 20);
+      message.error(errorData, 10);
       throw new Error("Network response was not ok");
     }
@@ -420,7 +420,7 @@ export const modelInfoCall = async (
     if (!response.ok) {
       const errorData = await response.text();
-      message.error(errorData, 20);
+      message.error(errorData, 10);
       throw new Error("Network response was not ok");
     }
@@ -463,7 +463,7 @@ export const modelMetricsCall = async (
     if (!response.ok) {
       const errorData = await response.text();
-      message.error(errorData, 20);
+      message.error(errorData, 10);
       throw new Error("Network response was not ok");
     }
     const data = await response.json();
@@ -506,7 +506,7 @@ export const modelMetricsSlowResponsesCall = async (
     if (!response.ok) {
       const errorData = await response.text();
-      message.error(errorData, 20);
+      message.error(errorData, 10);
       throw new Error("Network response was not ok");
     }
     const data = await response.json();
@@ -547,7 +547,7 @@ export const modelExceptionsCall = async (
     if (!response.ok) {
       const errorData = await response.text();
-      message.error(errorData, 20);
+      message.error(errorData, 10);
       throw new Error("Network response was not ok");
     }
     const data = await response.json();
@@ -583,7 +583,7 @@ export const modelAvailableCall = async (
     if (!response.ok) {
       const errorData = await response.text();
-      message.error(errorData, 20);
+      message.error(errorData, 10);
       throw new Error("Network response was not ok");
     }
@@ -612,7 +612,7 @@ export const keySpendLogsCall = async (accessToken: String, token: String) => {
     });
     if (!response.ok) {
       const errorData = await response.text();
-      message.error(errorData, 20);
+      message.error(errorData, 10);
       throw new Error("Network response was not ok");
     }
@@ -641,7 +641,7 @@ export const teamSpendLogsCall = async (accessToken: String) => {
     });
     if (!response.ok) {
       const errorData = await response.text();
-      message.error(errorData, 20);
+      message.error(errorData, 10);
       throw new Error("Network response was not ok");
     }
@@ -670,7 +670,7 @@ export const tagsSpendLogsCall = async (accessToken: String) => {
     });
     if (!response.ok) {
       const errorData = await response.text();
-      message.error(errorData, 20);
+      message.error(errorData, 10);
       throw new Error("Network response was not ok");
     }
@@ -710,7 +710,7 @@ export const userSpendLogsCall = async (
     });
     if (!response.ok) {
       const errorData = await response.text();
-      message.error(errorData, 20);
+      message.error(errorData, 10);
       throw new Error("Network response was not ok");
     }
@@ -740,7 +740,7 @@ export const adminSpendLogsCall = async (accessToken: String) => {
     });
     if (!response.ok) {
       const errorData = await response.text();
-      message.error(errorData, 20);
+      message.error(errorData, 10);
       throw new Error("Network response was not ok");
     }
@@ -770,7 +770,7 @@ export const adminTopKeysCall = async (accessToken: String) => {
     });
     if (!response.ok) {
       const errorData = await response.text();
-      message.error(errorData, 20);
+      message.error(errorData, 10);
       throw new Error("Network response was not ok");
     }
@@ -825,7 +825,7 @@ export const adminTopEndUsersCall = async (
     const response = await fetch(url, requestOptions);
     if (!response.ok) {
       const errorData = await response.text();
-      message.error(errorData, 20);
+      message.error(errorData, 10);
       throw new Error("Network response was not ok");
     }
@@ -855,7 +855,7 @@ export const adminTopModelsCall = async (accessToken: String) => {
     });
     if (!response.ok) {
       const errorData = await response.text();
-      message.error(errorData, 20);
+      message.error(errorData, 10);
       throw new Error("Network response was not ok");
     }
@@ -886,7 +886,7 @@ export const keyInfoCall = async (accessToken: String, keys: String[]) => {
     if (!response.ok) {
       const errorData = await response.text();
-      message.error(errorData, 20);
+      message.error(errorData, 10);
       throw new Error("Network response was not ok");
     }
@@ -912,7 +912,7 @@ export const spendUsersCall = async (accessToken: String, userID: String) => {
     });
     if (!response.ok) {
       const errorData = await response.text();
-      message.error(errorData, 20);
+      message.error(errorData, 10);
       throw new Error("Network response was not ok");
     }
@@ -950,7 +950,7 @@ export const userRequestModelCall = async (
     if (!response.ok) {
       const errorData = await response.text();
-      message.error("Failed to delete key: " + errorData, 20);
+      message.error("Failed to delete key: " + errorData, 10);
       throw new Error("Network response was not ok");
     }
     const data = await response.json();
@@ -980,7 +980,7 @@ export const userGetRequesedtModelsCall = async (accessToken: String) => {
     if (!response.ok) {
       const errorData = await response.text();
-      message.error("Failed to delete key: " + errorData, 20);
+      message.error("Failed to delete key: " + errorData, 10);
       throw new Error("Network response was not ok");
     }
     const data = await response.json();
@@ -1020,7 +1020,7 @@ export const userGetAllUsersCall = async (
     if (!response.ok) {
       const errorData = await response.text();
-      message.error("Failed to delete key: " + errorData, 20);
+      message.error("Failed to delete key: " + errorData, 10);
       throw new Error("Network response was not ok");
     }
     const data = await response.json();
@@ -1055,7 +1055,7 @@ export const teamCreateCall = async (
     if (!response.ok) {
       const errorData = await response.text();
-      message.error("Failed to create key: " + errorData, 20);
+      message.error("Failed to create key: " + errorData, 10);
       console.error("Error response from the server:", errorData);
       throw new Error("Network response was not ok");
     }
@@ -1092,7 +1092,7 @@ export const keyUpdateCall = async (
     if (!response.ok) {
       const errorData = await response.text();
-      message.error("Failed to update key: " + errorData, 20);
+      message.error("Failed to update key: " + errorData, 10);
       console.error("Error response from the server:", errorData);
       throw new Error("Network response was not ok");
     }
@@ -1127,7 +1127,7 @@ export const teamUpdateCall = async (
     if (!response.ok) {
       const errorData = await response.text();
-      message.error("Failed to update team: " + errorData, 20);
+      message.error("Failed to update team: " + errorData, 10);
       console.error("Error response from the server:", errorData);
       throw new Error("Network response was not ok");
     }
@@ -1162,7 +1162,7 @@ export const modelUpdateCall = async (
     if (!response.ok) {
       const errorData = await response.text();
-      message.error("Failed to update model: " + errorData, 20);
+      message.error("Failed to update model: " + errorData, 10);
       console.error("Error update from the server:", errorData);
       throw new Error("Network response was not ok");
     }
@@ -1207,7 +1207,7 @@ export const teamMemberAddCall = async (
     if (!response.ok) {
       const errorData = await response.text();
-      message.error("Failed to create key: " + errorData, 20);
+      message.error("Failed to create key: " + errorData, 10);
       console.error("Error response from the server:", errorData);
       throw new Error("Network response was not ok");
     }
@@ -1247,7 +1247,7 @@ export const userUpdateUserCall = async (
     if (!response.ok) {
       const errorData = await response.text();
-      message.error("Failed to create key: " + errorData, 20);
+      message.error("Failed to create key: " + errorData, 10);
       console.error("Error response from the server:", errorData);
       throw new Error("Network response was not ok");
     }
@@ -1287,7 +1287,7 @@ export const PredictedSpendLogsCall = async (
     if (!response.ok) {
       const errorData = await response.text();
-      message.error(errorData, 20);
+      message.error(errorData, 10);
       throw new Error("Network response was not ok");
     }
@@ -1398,7 +1398,7 @@ export const getCallbacksCall = async (
     if (!response.ok) {
       const errorData = await response.text();
-      message.error(errorData, 20);
+      message.error(errorData, 10);
       throw new Error("Network response was not ok");
     }
@@ -1441,7 +1441,7 @@ export const setCallbacksCall = async (
     if (!response.ok) {
       const errorData = await response.text();
-      message.error(errorData, 20);
+      message.error(errorData, 10);
       throw new Error("Network response was not ok");
     }
diff --git a/ui/litellm-dashboard/src/components/view_key_spend_report.tsx b/ui/litellm-dashboard/src/components/view_key_spend_report.tsx
deleted file mode 100644
index 6741db2a9..000000000
--- a/ui/litellm-dashboard/src/components/view_key_spend_report.tsx
+++ /dev/null
@@ -1,141 +0,0 @@
-"use client";
-
-import React, { useState, useEffect } from "react";
-import {
-  Button as Button2,
-  Modal,
-  Form,
-  Input,
-  InputNumber,
-  Select,
-  message,
-} from "antd";
-import {
-  Button,
-  Text,
-  Card,
-  Table,
-  BarChart,
-  Title,
-  Subtitle,
-  BarList,
-  Metric,
-} from "@tremor/react";
-import { keySpendLogsCall, PredictedSpendLogsCall } from "./networking";
-
-interface ViewKeySpendReportProps {
-  token: string;
-  accessToken: string;
-  keySpend: number;
-  keyBudget: number;
-  keyName: string;
-}
-
-type ResponseValueType = {
-  startTime: string; // Assuming startTime is a string, adjust it if it's of a different type
-  spend: number; // Assuming spend is a number, adjust it if it's of a different type
-  user: string; // Assuming user is a string, adjust it if it's of a different type
-};
-
-const ViewKeySpendReport: React.FC<ViewKeySpendReportProps> = ({
-  token,
-  accessToken,
-  keySpend,
-  keyBudget,
-  keyName,
-}) => {
-  const [isModalVisible, setIsModalVisible] = useState(false);
-  const [data, setData] = useState<{ day: string; spend: number }[] | null>(
-    null
-  );
-  const [predictedSpendString, setPredictedSpendString] = useState("");
-  const [userData, setUserData] = useState<
-    { name: string; value: number }[] | null
-  >(null);
-
-  const showModal = () => {
-    console.log("Show Modal triggered");
-    setIsModalVisible(true);
-    fetchData();
-  };
-
-  const handleOk = () => {
-    setIsModalVisible(false);
-  };
-
-  const handleCancel = () => {
-    setIsModalVisible(false);
-  };
-
-  // call keySpendLogsCall and set the data
-  const fetchData = async () => {
-    try {
-      if (accessToken == null || token == null) {
-        return;
-      }
-      console.log(`accessToken: ${accessToken}; token: ${token}`);
-      const response = await keySpendLogsCall(
-        (accessToken = accessToken),
-        (token = token)
-      );
-      console.log("Response:", response);
-      setData(response);
-
-      // predict spend based on response
-      const predictedSpend = await PredictedSpendLogsCall(accessToken, response);
-      console.log("Response2:", predictedSpend);
-
-      // append predictedSpend to data
-      const combinedData = [...response, ...predictedSpend.response];
-      setData(combinedData);
-      setPredictedSpendString(predictedSpend.predicted_spend)
-
-      console.log("Combined Data:", combinedData);
-      // setPredictedSpend(predictedSpend);
-
-    } catch (error) {
-      console.error("There was an error fetching the data", error);
-    }
-  };
-
-
-  if (!token) {
-    return null;
-  }
-
-  return (
-
-
-        Key Name: {keyName}
-
-        Monthly Spend ${keySpend}
-        {predictedSpendString}
-
-
-          {data && (
-
-          )}
-
-
-
-
-  );
-};
-
-export default ViewKeySpendReport;
diff --git a/ui/litellm-dashboard/src/components/view_key_table.tsx b/ui/litellm-dashboard/src/components/view_key_table.tsx
index d812c19a3..8cab60c1a 100644
--- a/ui/litellm-dashboard/src/components/view_key_table.tsx
+++ b/ui/litellm-dashboard/src/components/view_key_table.tsx
@@ -17,6 +17,7 @@ import {
   DialogPanel,
   Text,
   Title,
+  Subtitle,
   Icon,
   BarChart,
 } from "@tremor/react";
@@ -32,8 +33,6 @@ import {
   Select,
 } from "antd";
 
-import ViewKeySpendReport from "./view_key_spend_report";
-
 const { Option } = Select;
 
@@ -84,7 +83,6 @@ const ViewKeyTable: React.FC = ({
   const [isButtonClicked, setIsButtonClicked] = useState(false);
   const [isDeleteModalOpen, setIsDeleteModalOpen] = useState(false);
   const [keyToDelete, setKeyToDelete] = useState(null);
-  const [openDialogId, setOpenDialogId] = React.useState(null);
   const [selectedItem, setSelectedItem] = useState(null);
   const [spendData, setSpendData] = useState<{ day: string; spend: number }[] | null>(
     null
   );
@@ -92,6 +90,7 @@ const ViewKeyTable: React.FC = ({
   const [predictedSpendString, setPredictedSpendString] = useState("");
 
   const [editModalVisible, setEditModalVisible] = useState(false);
+  const [infoDialogVisible, setInfoDialogVisible] = useState(false);
   const [selectedToken, setSelectedToken] = useState(null);
   const [userModels, setUserModels] = useState([]);
   const initialKnownTeamIDs: Set = new Set();
@@ -328,47 +327,6 @@ const ViewKeyTable: React.FC = ({
   };
-
-  // call keySpendLogsCall and set the data
-  const fetchData = async (item: ItemData | null) => {
-    try {
-      if (accessToken == null || item == null) {
-        return;
-      }
-      console.log(`accessToken: ${accessToken}; token: ${item.token}`);
-      const response = await keySpendLogsCall(accessToken, item.token);
-
-      console.log("Response:", response);
-      setSpendData(response);
-
-      // predict spend based on response
-      try {
-        const predictedSpend = await PredictedSpendLogsCall(accessToken, response);
-        console.log("Response2:", predictedSpend);
-
-        // append predictedSpend to data
-        const combinedData = [...response, ...predictedSpend.response];
-        setSpendData(combinedData);
-        setPredictedSpendString(predictedSpend.predicted_spend)
-
-        console.log("Combined Data:", combinedData);
-      } catch (error) {
-        console.error("There was an error fetching the predicted data", error);
-      }
-
-      // setPredictedSpend(predictedSpend);
-
-    } catch (error) {
-      console.error("There was an error fetching the data", error);
-    }
-  };
-
-  useEffect(() => {
-    fetchData(selectedItem);
-  }, [selectedItem]);
-
-
   const handleDelete = async (token: any) => {
     console.log("handleDelete:", token);
     if (token.token == null) {
@@ -415,13 +373,6 @@ const ViewKeyTable: React.FC = ({
   if (data == null) {
     return;
   }
-
-  // useEffect(() => {
-  //   if (openDialogId !== null && selectedItem !== null) {
-  //     fetchData(selectedItem);
-  //   }
-  // }, [openDialogId, selectedItem]);
-
   console.log("RERENDER TRIGGERED");
   return (
@@ -564,25 +515,27 @@ const ViewKeyTable: React.FC = ({
                     {
-                      setSelectedItem(item);
-                      setOpenDialogId(item.id);
+                      setSelectedToken(item);
+                      setInfoDialogVisible(true);
                     }}
                     icon={InformationCircleIcon}
                     size="sm"
                   />
+
-                  {
-                    setOpenDialogId(null);
-                    setSelectedItem(null);
-                  }}
+                  {
+                    setInfoDialogVisible(false);
+                    setSelectedToken(null);
+                  }}
+                  footer={null}
+                  width={800}
+                  >
->
-
-                  {selectedItem && (
+                  {selectedToken && (
                     <>
-
+

                      Spend

                      {(() => {
                        try {
-                          return parseFloat(selectedItem.spend).toFixed(4);
-                        } catch (error) {
-                          return selectedItem.spend;
+                          return parseFloat(selectedToken.spend).toFixed(4);
+                        } catch (error) {
+                          return selectedToken.spend;
                        }
                      })()}
@@ -606,8 +559,8 @@ const ViewKeyTable: React.FC = ({

-                      {selectedItem.max_budget != null ? (
-                        <>{selectedItem.max_budget}
+                      {selectedToken.max_budget != null ? (
+                        <>{selectedToken.max_budget}
                      ) : (
                        <>Unlimited
                      )}
@@ -620,9 +573,9 @@ const ViewKeyTable: React.FC = ({

-                      {selectedItem.expires != null ? (
+                      {selectedToken.expires != null ? (
                        <>
-                          {new Date(selectedItem.expires).toLocaleString(undefined, {
+                          {new Date(selectedToken.expires).toLocaleString(undefined, {
                            day: 'numeric',
                            month: 'long',
                            year: 'numeric',
@@ -639,38 +592,28 @@ const ViewKeyTable: React.FC = ({

-
-                      {spendData && (
-
-                      )}
-
-
+
+                      Token Name
+                      {selectedToken.key_alias ? selectedToken.key_alias : selectedToken.key_name}
+                      Token ID
+                      {selectedToken.token}
                      Metadata
-
-                        {JSON.stringify(selectedItem.metadata)}
-
+                      {JSON.stringify(selectedToken.metadata)}
+
                    )}
-
-
+
+