diff --git a/ui/litellm-dashboard/src/components/chat_ui.tsx b/ui/litellm-dashboard/src/components/chat_ui.tsx
index b2cfc65ba2..d3c9428297 100644
--- a/ui/litellm-dashboard/src/components/chat_ui.tsx
+++ b/ui/litellm-dashboard/src/components/chat_ui.tsx
@@ -3,6 +3,8 @@ import { Card, Title, Table, TableHead, TableRow, TableCell, TableBody, Grid } from "@tremor/react";
 import { modelInfoCall } from "./networking";
 import openai from "openai";
 
+
+
 interface ChatUIProps {
   accessToken: string | null;
   token: string | null;
@@ -10,15 +12,15 @@ interface ChatUIProps {
   userID: string | null;
 }
 
-async function generateModelResponse(inputMessage: string, updateUI: (chunk: string) => void) {
+async function generateModelResponse(inputMessage: string, updateUI: (chunk: string) => void, selectedModel: string, accessToken: string) {
   const client = new openai.OpenAI({
-    apiKey: 'sk-1234', // Replace with your OpenAI API key
+    apiKey: accessToken, // Replace with your OpenAI API key
     baseURL: 'http://0.0.0.0:4000', // Replace with your OpenAI API base URL
     dangerouslyAllowBrowser: true, // using a temporary litellm proxy key
   });
 
   const response = await client.chat.completions.create({
-    model: 'azure-gpt-3.5',
+    model: selectedModel,
     stream: true,
     messages: [
       {
@@ -35,88 +37,117 @@ async function generateModelResponse(inputMessage: string, updateUI: (chunk: string) => void) {
     }
   }
 }
+
 const ChatUI: React.FC<ChatUIProps> = ({ accessToken, token, userRole, userID }) => {
   const [inputMessage, setInputMessage] = useState("");
   const [chatHistory, setChatHistory] = useState<any[]>([]);
+  const [selectedModel, setSelectedModel] = useState<string | undefined>(undefined);
+  const [modelInfo, setModelInfo] = useState<any | null>(null); // Declare modelInfo at the component level
+
+  useEffect(() => {
+    // Fetch model info and set the default selected model
+    const fetchModelInfo = async () => {
+      const fetchedModelInfo = await modelInfoCall(accessToken, userID, userRole);
+      console.log("model_info:", fetchedModelInfo);
+
+      if (fetchedModelInfo?.data.length > 0) {
+        setModelInfo(fetchedModelInfo);
+        setSelectedModel(fetchedModelInfo.data[0].model_name);
+      }
+    };
+
+    fetchModelInfo();
+  }, [accessToken, userID, userRole]);
 
   const updateUI = (role: string, chunk: string) => {
     setChatHistory((prevHistory) => {
       const lastMessage = prevHistory[prevHistory.length - 1];
 
-      // Check if the last message is from the same role
       if (lastMessage && lastMessage.role === role) {
-        // Concatenate the new chunk to the existing message
         return [
           ...prevHistory.slice(0, prevHistory.length - 1),
           { role, content: lastMessage.content + chunk },
         ];
       } else {
-        // Append a new message if the last message is not from the same role
         return [...prevHistory, { role, content: chunk }];
       }
     });
   };
 
   const handleSendMessage = async () => {
     if (inputMessage.trim() === "") return;
 
-    // Add the user's message to the chat history
     setChatHistory((prevHistory) => [
       ...prevHistory,
       { role: "user", content: inputMessage },
     ]);
 
     try {
-      await generateModelResponse(inputMessage, (chunk) => updateUI("assistant", chunk));
+      if (selectedModel) {
+        await generateModelResponse(inputMessage, (chunk) => updateUI("assistant", chunk), selectedModel, accessToken);
+      }
     } catch (error) {
       console.error("Error fetching model response", error);
       updateUI("assistant", "Error fetching model response");
     }
 
     setInputMessage("");
   };
 
   return (
     <div style={{ width: "100%", position: "relative" }}>
       <Grid className="gap-2 p-10 h-[75vh] w-full">
         <Card>
           <Table className="mt-5">
             <TableHead>
               <TableRow>
                 <TableCell>
                   <Title>Chat</Title>
                 </TableCell>
               </TableRow>
             </TableHead>
             <TableBody>
               {chatHistory.map((message, index) => (
                 <TableRow key={index}>
                   <TableCell>{`${message.role}: ${message.content}`}</TableCell>
                 </TableRow>
               ))}
             </TableBody>
           </Table>
           <div>
             <input
               type="text"
               value={inputMessage}
               onChange={(e) => setInputMessage(e.target.value)}
               className="flex-1 p-2 border rounded-md mr-2"
               placeholder="Type your message..."
             />
             <button
               onClick={handleSendMessage}
               className="p-2 border rounded-md"
             >
               Send
             </button>
           </div>
         </Card>
       </Grid>
     </div>
   );
 };
 
-export default ChatUI;
+export default ChatUI;
\ No newline at end of file
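Note on the streaming path: the change above is just the OpenAI v4 SDK pointed at a LiteLLM proxy, with the dashboard's per-user key used as the API key. A minimal standalone sketch of that pattern, assuming a proxy at http://0.0.0.0:4000; streamFromProxy and onChunk are illustrative names, not part of this diff:

// Sketch of the streaming pattern generateModelResponse uses.
// streamFromProxy/onChunk are illustrative names, not from the diff.
import OpenAI from "openai";

async function streamFromProxy(
  prompt: string,
  model: string,
  accessToken: string,
  onChunk: (text: string) => void
): Promise<void> {
  const client = new OpenAI({
    apiKey: accessToken, // per-user key issued by the LiteLLM proxy
    baseURL: "http://0.0.0.0:4000", // proxy address; adjust per deployment
    dangerouslyAllowBrowser: true, // tolerable only for a temporary proxy key
  });

  // stream: true makes create() return an async iterable of chunks.
  const response = await client.chat.completions.create({
    model,
    stream: true,
    messages: [{ role: "user", content: prompt }],
  });

  // Each chunk carries an incremental delta; forward any text to the caller.
  for await (const chunk of response) {
    const text = chunk.choices[0]?.delta?.content;
    if (text) onChunk(text);
  }
}

The component wires this up as generateModelResponse(inputMessage, (chunk) => updateUI("assistant", chunk), selectedModel, accessToken), so each delta is appended to the trailing assistant message as it arrives.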
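Note on the default model: the new useEffect relies on modelInfoCall resolving to a payload shaped like { data: [{ model_name: string, ... }] } and defaults to the first entry. A sketch of that bootstrap logic under that assumed shape; pickDefaultModel is an illustrative helper name:

// Sketch of the default-model bootstrap the new useEffect performs.
// ModelInfoResponse mirrors the shape the component reads
// (fetchedModelInfo.data[0].model_name); pickDefaultModel is illustrative.
interface ModelInfoResponse {
  data: { model_name: string }[];
}

function pickDefaultModel(info: ModelInfoResponse | null): string | undefined {
  // Guard against a missing or empty model list before defaulting to the
  // first entry, mirroring the fetchedModelInfo?.data.length > 0 check.
  return info && info.data.length > 0 ? info.data[0].model_name : undefined;
}

In the component, the resolved name seeds selectedModel, and handleSendMessage skips the proxy call until it is set.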