diff --git a/ui/litellm-dashboard/src/components/chat_ui.tsx b/ui/litellm-dashboard/src/components/chat_ui.tsx
index 6511a58e90..43bde08a70 100644
--- a/ui/litellm-dashboard/src/components/chat_ui.tsx
+++ b/ui/litellm-dashboard/src/components/chat_ui.tsx
@@ -14,10 +14,17 @@ import {
   TabList,
   TabPanel,
   Metric,
-  Select,
+  Col,
+  Text,
   SelectItem,
+  TextInput,
   TabPanels,
+  Button,
 } from "@tremor/react";
+
+
+
+import { message, Select } from "antd";
 import { modelAvailableCall } from "./networking";
 import openai from "openai";
 import { Prism as SyntaxHighlighter } from "react-syntax-highlighter";
@@ -48,59 +55,84 @@ async function generateModelResponse(
     dangerouslyAllowBrowser: true, // using a temporary litellm proxy key
   });
 
-  const response = await client.chat.completions.create({
-    model: selectedModel,
-    stream: true,
-    messages: [
-      {
-        role: "user",
-        content: inputMessage,
-      },
-    ],
-  });
+  try {
+    const response = await client.chat.completions.create({
+      model: selectedModel,
+      stream: true,
+      messages: [
+        {
+          role: "user",
+          content: inputMessage,
+        },
+      ],
+    });
 
-  for await (const chunk of response) {
-    console.log(chunk);
-    if (chunk.choices[0].delta.content) {
-      updateUI(chunk.choices[0].delta.content);
+    for await (const chunk of response) {
+      console.log(chunk);
+      if (chunk.choices[0].delta.content) {
+        updateUI(chunk.choices[0].delta.content);
+      }
     }
+  } catch (error) {
+    message.error(`Error occurred while generating model response. Please try again. Error: ${error}`);
   }
 }
 
+
 const ChatUI: React.FC<ChatUIProps> = ({
   accessToken,
   token,
   userRole,
   userID,
 }) => {
+  const [apiKey, setApiKey] = useState("");
   const [inputMessage, setInputMessage] = useState("");
   const [chatHistory, setChatHistory] = useState([]);
   const [selectedModel, setSelectedModel] = useState<string | undefined>(
     undefined
   );
-  const [modelInfo, setModelInfo] = useState(null); // Declare modelInfo at the component level
+  const [modelInfo, setModelInfo] = useState([]); // Declare modelInfo at the component level
 
   useEffect(() => {
     if (!accessToken || !token || !userRole || !userID) {
       return;
     }
+
+
+    // Fetch model info and set the default selected model
     const fetchModelInfo = async () => {
-      const fetchedAvailableModels = await modelAvailableCall(
-        accessToken,
-        userID,
-        userRole
-      );
-      console.log("model_info:", fetchedAvailableModels);
-
-      if (fetchedAvailableModels?.data.length > 0) {
-        setModelInfo(fetchedAvailableModels.data);
-        setSelectedModel(fetchedAvailableModels.data[0].id);
+      try {
+        const fetchedAvailableModels = await modelAvailableCall(
+          accessToken,
+          userID,
+          userRole
+        );
+
+        console.log("model_info:", fetchedAvailableModels);
+
+        if (fetchedAvailableModels?.data.length > 0) {
+          const options = fetchedAvailableModels["data"].map(item => ({
+            value: item.id, // use the model id as the option value
+            label: item.id  // use the model id as the option label
+          }));
+
+          // 'options' now holds the { value, label } list for the model Select
+          console.log(options); // log it to verify the list
+
+          // keep setModelInfo(options) inside the if block so state is only set when data is available
+          setModelInfo(options);
+          setSelectedModel(fetchedAvailableModels.data[0].id);
+        }
+      } catch (error) {
+        console.error("Error fetching model info:", error);
+        // Handle error as needed
+      }
     };
-
+
     fetchModelInfo();
   }, [accessToken, userID, userRole]);
 
+
   const updateUI = (role: string, chunk: string) => {
     setChatHistory((prevHistory) => {
@@ -120,7 +152,7 @@ const ChatUI: React.FC<ChatUIProps> = ({
   const handleSendMessage = async () => {
     if (inputMessage.trim() === "") return;
 
-    if (!accessToken || !token || !userRole || !userID) {
+    if (!apiKey || !token || !userRole || !userID) {
       return;
     }
 
@@ -135,7 +167,7 @@ const ChatUI: React.FC<ChatUIProps> = ({
         inputMessage,
         (chunk) => updateUI("assistant", chunk),
         selectedModel,
-        accessToken
+        apiKey
       );
     }
   } catch (error) {
@@ -156,32 +188,47 @@ const ChatUI: React.FC<ChatUIProps> = ({
     );
   }
 
+  const onChange = (value: string) => {
+    console.log(`selected ${value}`);
+    setSelectedModel(value);
+  };
+
   return (
 [The rest of this hunk rewrites the component's returned JSX, but the markup itself was lost in extraction. The surviving fragments show "Chat" and "API Reference" tabs, a new "API Key" field, a new "Select Model:" dropdown, and an updated message input that keeps onChange={(e) => setInputMessage(e.target.value)} and placeholder="Type your message..." while dropping the old className="flex-1 p-2 border rounded-md mr-2" styling.]
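
For reference, the mapping built in `fetchModelInfo` is shaped for antd's `Select`, which accepts an `options` array of `{ value, label }` objects and passes the chosen `value` to `onChange`. The sketch below is not part of the diff: it is a minimal standalone illustration of that wiring, and the `ModelPicker` component and `ModelOption` type are hypothetical names introduced only for the example.

```tsx
import React, { useState } from "react";
import { Select } from "antd";

// Shape of the entries produced in fetchModelInfo: one { value, label } pair per model id.
interface ModelOption {
  value: string;
  label: string;
}

// Hypothetical picker showing how the mapped options drive the antd Select.
const ModelPicker: React.FC<{ modelInfo: ModelOption[] }> = ({ modelInfo }) => {
  const [selectedModel, setSelectedModel] = useState<string | undefined>(undefined);

  // antd passes the selected option's value directly to onChange,
  // mirroring the onChange handler added in the diff.
  const onChange = (value: string) => {
    console.log(`selected ${value}`);
    setSelectedModel(value);
  };

  return (
    <Select
      placeholder="Select a Model"
      value={selectedModel}
      onChange={onChange}
      options={modelInfo} // e.g. [{ value: "gpt-3.5-turbo", label: "gpt-3.5-turbo" }]
      style={{ width: 200 }}
    />
  );
};

export default ModelPicker;
```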
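
The other half of the change is that `handleSendMessage` now streams with the key typed into the new "API Key" field instead of the dashboard `accessToken`. Below is a minimal, self-contained sketch of that streaming pattern using the `openai` npm client; `streamChat` is a hypothetical helper and the `baseURL` is an assumed local LiteLLM proxy address, since the component's actual client setup sits outside the hunks shown above.

```ts
import OpenAI from "openai";

// Hypothetical helper mirroring generateModelResponse: stream a completion
// through the LiteLLM proxy using the user-supplied key.
async function streamChat(
  apiKey: string,
  selectedModel: string,
  inputMessage: string,
  onChunk: (text: string) => void
): Promise<void> {
  const client = new OpenAI({
    apiKey, // the key entered in the "API Key" field, not the dashboard accessToken
    baseURL: "http://localhost:4000", // assumed LiteLLM proxy address
    dangerouslyAllowBrowser: true, // using a temporary litellm proxy key
  });

  try {
    const response = await client.chat.completions.create({
      model: selectedModel,
      stream: true,
      messages: [{ role: "user", content: inputMessage }],
    });

    // Each streamed chunk carries an incremental delta; forward non-empty text to the UI.
    for await (const chunk of response) {
      const delta = chunk.choices[0]?.delta?.content;
      if (delta) {
        onChunk(delta);
      }
    }
  } catch (error) {
    console.error("Error while streaming model response:", error);
  }
}

// Usage, mirroring handleSendMessage in the diff:
// await streamChat(apiKey, selectedModel ?? "gpt-3.5-turbo", inputMessage,
//   (chunk) => updateUI("assistant", chunk));
```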