import React, { useState, useEffect, useRef } from "react";
import ReactMarkdown from "react-markdown";
import {
  Card,
  Title,
  Table,
  TableHead,
  TableRow,
  TableCell,
  TableBody,
  Grid,
  Tab,
  TabGroup,
  TabList,
  TabPanel,
  TabPanels,
  Metric,
  Col,
  Text,
  SelectItem,
  TextInput,
  Button,
  Divider,
} from "@tremor/react";
import { message, Select, Spin, Typography, Tooltip, Input } from "antd";
import { makeOpenAIChatCompletionRequest } from "./chat_ui/llm_calls/chat_completion";
import { makeOpenAIImageGenerationRequest } from "./chat_ui/llm_calls/image_generation";
import { fetchAvailableModels, ModelGroup } from "./chat_ui/llm_calls/fetch_models";
import { litellmModeMapping, ModelMode, EndpointType, getEndpointType } from "./chat_ui/mode_endpoint_mapping";
import { Prism as SyntaxHighlighter } from "react-syntax-highlighter";
import { coy } from 'react-syntax-highlighter/dist/esm/styles/prism';
import EndpointSelector from "./chat_ui/EndpointSelector";
import TagSelector from "./tag_management/TagSelector";
import { determineEndpointType } from "./chat_ui/EndpointUtils";
import { MessageType } from "./chat_ui/types";
import ReasoningContent from "./chat_ui/ReasoningContent";
import {
  SendOutlined,
  ApiOutlined,
  KeyOutlined,
  ClearOutlined,
  RobotOutlined,
  UserOutlined,
  DeleteOutlined,
  LoadingOutlined,
  TagsOutlined,
} from "@ant-design/icons";

const { TextArea } = Input;

interface ChatUIProps {
  accessToken: string | null;
  token: string | null;
  userRole: string | null;
  userID: string | null;
  disabledPersonalKeyCreation: boolean;
}

const ChatUI: React.FC<ChatUIProps> = ({
  accessToken,
  token,
  userRole,
  userID,
  disabledPersonalKeyCreation,
}) => {
  const [apiKeySource, setApiKeySource] = useState<'session' | 'custom'>(
    disabledPersonalKeyCreation ? 'custom' : 'session'
  );
  const [apiKey, setApiKey] = useState("");
  const [inputMessage, setInputMessage] = useState("");
  const [chatHistory, setChatHistory] = useState<MessageType[]>([]);
  const [selectedModel, setSelectedModel] = useState<string | undefined>(
    undefined
  );
  const [showCustomModelInput, setShowCustomModelInput] = useState(false);
  const [modelInfo, setModelInfo] = useState<ModelGroup[]>([]);
  const customModelTimeout = useRef<NodeJS.Timeout | null>(null);
  const [endpointType, setEndpointType] = useState<EndpointType>(EndpointType.CHAT);
  const [isLoading, setIsLoading] = useState(false);
  const abortControllerRef = useRef<AbortController | null>(null);
  const [selectedTags, setSelectedTags] = useState<string[]>([]);

  const chatEndRef = useRef<HTMLDivElement>(null);

  useEffect(() => {
    let userApiKey = apiKeySource === 'session' ?
      accessToken : apiKey;
    if (!userApiKey || !token || !userRole || !userID) {
      console.log("userApiKey or token or userRole or userID is missing = ", userApiKey, token, userRole, userID);
      return;
    }

    // Fetch model info and set the default selected model
    const loadModels = async () => {
      try {
        if (!userApiKey) {
          console.log("userApiKey is missing");
          return;
        }
        const uniqueModels = await fetchAvailableModels(
          userApiKey,
        );

        console.log("Fetched models:", uniqueModels);

        if (uniqueModels.length > 0) {
          setModelInfo(uniqueModels);
          setSelectedModel(uniqueModels[0].model_group);

          // Auto-set endpoint based on the first model's mode
          if (uniqueModels[0].mode) {
            const initialEndpointType = determineEndpointType(uniqueModels[0].model_group, uniqueModels);
            setEndpointType(initialEndpointType);
          }
        }
      } catch (error) {
        console.error("Error fetching model info:", error);
      }
    };

    loadModels();
  }, [accessToken, userID, userRole, apiKeySource, apiKey]);

  useEffect(() => {
    // Scroll to the bottom of the chat whenever chatHistory updates
    if (chatEndRef.current) {
      // Add a small delay to ensure content is rendered
      setTimeout(() => {
        chatEndRef.current?.scrollIntoView({
          behavior: "smooth",
          block: "end" // Keep the scroll position at the end
        });
      }, 100);
    }
  }, [chatHistory]);

  const updateTextUI = (role: string, chunk: string, model?: string) => {
    setChatHistory((prevHistory) => {
      const lastMessage = prevHistory[prevHistory.length - 1];

      if (lastMessage && lastMessage.role === role && !lastMessage.isImage) {
        return [
          ...prevHistory.slice(0, prevHistory.length - 1),
          { ...lastMessage, content: lastMessage.content + chunk, model },
        ];
      } else {
        return [...prevHistory, { role, content: chunk, model }];
      }
    });
  };

  const updateReasoningContent = (chunk: string) => {
    setChatHistory((prevHistory) => {
      const lastMessage = prevHistory[prevHistory.length - 1];

      if (lastMessage && lastMessage.role === "assistant" && !lastMessage.isImage) {
        return [
          ...prevHistory.slice(0, prevHistory.length - 1),
          {
            ...lastMessage,
            reasoningContent: (lastMessage.reasoningContent || "") + chunk
          },
        ];
      } else {
        // If there's no assistant message yet, we'll create one with empty content
        // but with reasoning content
        if (prevHistory.length > 0 && prevHistory[prevHistory.length - 1].role === "user") {
          return [
            ...prevHistory,
            { role: "assistant", content: "", reasoningContent: chunk }
          ];
        }
        return prevHistory;
      }
    });
  };

  const updateImageUI = (imageUrl: string, model: string) => {
    setChatHistory((prevHistory) => [
      ...prevHistory,
      { role: "assistant", content: imageUrl, model, isImage: true }
    ]);
  };

  const handleKeyDown = (event: React.KeyboardEvent) => {
    if (event.key === 'Enter' && !event.shiftKey) {
      event.preventDefault(); // Prevent default to avoid newline
      handleSendMessage();
    }
    // If Shift+Enter is pressed, the default behavior (inserting a newline) will occur
  };

  const handleCancelRequest = () => {
    if (abortControllerRef.current) {
      abortControllerRef.current.abort();
      abortControllerRef.current = null;
      setIsLoading(false);
      message.info("Request cancelled");
    }
  };

  const handleSendMessage = async () => {
    if (inputMessage.trim() === "") return;

    if (!token || !userRole || !userID) {
      return;
    }

    const effectiveApiKey = apiKeySource === 'session' ?
        accessToken : apiKey;

    if (!effectiveApiKey) {
      message.error("Please provide an API key or select Current UI Session");
      return;
    }

    // Create new abort controller for this request
    abortControllerRef.current = new AbortController();
    const signal = abortControllerRef.current.signal;

    // Create message object without model field for API call
    const newUserMessage = { role: "user", content: inputMessage };

    // Update UI with full message object
    setChatHistory([...chatHistory, newUserMessage]);

    setIsLoading(true);

    try {
      if (selectedModel) {
        // Use EndpointType enum for comparison
        if (endpointType === EndpointType.CHAT) {
          // Create chat history for API call - strip out model field and isImage field
          const apiChatHistory = [...chatHistory.filter(msg => !msg.isImage).map(({ role, content }) => ({ role, content })), newUserMessage];

          await makeOpenAIChatCompletionRequest(
            apiChatHistory,
            (chunk, model) => updateTextUI("assistant", chunk, model),
            selectedModel,
            effectiveApiKey,
            selectedTags,
            signal,
            updateReasoningContent
          );
        } else if (endpointType === EndpointType.IMAGE) {
          // For image generation
          await makeOpenAIImageGenerationRequest(
            inputMessage,
            (imageUrl, model) => updateImageUI(imageUrl, model),
            selectedModel,
            effectiveApiKey,
            selectedTags,
            signal
          );
        }
      }
    } catch (error) {
      if (signal.aborted) {
        console.log("Request was cancelled");
      } else {
        console.error("Error fetching response", error);
        updateTextUI("assistant", "Error fetching response");
      }
    } finally {
      setIsLoading(false);
      abortControllerRef.current = null;
    }

    setInputMessage("");
  };

  const clearChatHistory = () => {
    setChatHistory([]);
    message.success("Chat history cleared.");
  };

  if (userRole && userRole === "Admin Viewer") {
    const { Title, Paragraph } = Typography;
    return (
      <div>
        <Title>Access Denied</Title>
        <Paragraph>Ask your proxy admin for access to test models</Paragraph>
      </div>
    );
  }

  const onModelChange = (value: string) => {
    console.log(`selected ${value}`);
    setSelectedModel(value);

    // Use the utility function to determine the endpoint type
    if (value !== 'custom') {
      const newEndpointType = determineEndpointType(value, modelInfo);
      setEndpointType(newEndpointType);
    }

    setShowCustomModelInput(value === 'custom');
  };

  const handleEndpointChange = (value: string) => {
    setEndpointType(value as EndpointType);
  };

  const antIcon = <LoadingOutlined style={{ fontSize: 24 }} spin />;

  return (
    <div className="flex w-full h-[80vh]">
      {/* Left Sidebar with Controls */}
      <div className="w-1/4 p-4 border-r border-gray-200 overflow-y-auto">
        <div className="mb-6">
          <Text className="font-medium block mb-2 text-gray-700">
            <KeyOutlined className="mr-2" /> API Key Source
          </Text>
          <Select
            disabled={disabledPersonalKeyCreation}
            defaultValue="session"
            style={{ width: "100%" }}
            onChange={(value) => setApiKeySource(value as 'session' | 'custom')}
            options={[
              { value: 'session', label: 'Current UI Session' },
              { value: 'custom', label: 'Virtual Key' },
            ]}
            className="rounded-md"
          />
          {apiKeySource === 'custom' && (
            <TextInput
              className="mt-2"
              type="password"
              value={apiKey}
              onValueChange={setApiKey}
            />
          )}
        </div>

        <div className="mb-6">
          <Select
            onChange={onModelChange}
            options={[
              ...modelInfo.map((option) => ({ value: option.model_group, label: option.model_group })),
              { value: 'custom', label: 'Enter custom model' }
            ]}
            style={{ width: "100%" }}
            showSearch={true}
            className="rounded-md"
          />
          {showCustomModelInput && (
            <TextInput
              className="mt-2"
              onValueChange={(value) => {
                // Using setTimeout to create a simple debounce effect
                if (customModelTimeout.current) {
                  clearTimeout(customModelTimeout.current);
                }
                customModelTimeout.current = setTimeout(() => {
                  setSelectedModel(value);
                }, 500); // 500ms delay after typing stops
              }}
            />
          )}
        </div>

        <div className="mb-6">
          <Text className="font-medium block mb-2 text-gray-700">
            <ApiOutlined className="mr-2" /> Endpoint Type
          </Text>
          <EndpointSelector
            endpointType={endpointType}
            onEndpointChange={handleEndpointChange}
          />
        </div>

        <div className="mb-6">
          <Text className="font-medium block mb-2 text-gray-700">
            <TagsOutlined className="mr-2" /> Tags
          </Text>
          <TagSelector
            value={selectedTags}
            onChange={setSelectedTags}
          />
        </div>
      </div>
      {/* Main Chat Area */}
      <div className="w-3/4 flex flex-col">
        <div className="flex-1 overflow-auto p-4">
          {chatHistory.length === 0 && (
            <div className="h-full flex items-center justify-center text-gray-400">
              <Text>Start a conversation or generate an image</Text>
            </div>
          )}

          {chatHistory.map((message, index) => (
            <div key={index} className="mb-4">
              <div className="flex items-center gap-2 mb-1.5">
                {message.role === "user" ? <UserOutlined /> : <RobotOutlined />}
                <strong className="text-sm capitalize">{message.role}</strong>
                {message.role === "assistant" && message.model && (
                  <span className="text-xs px-2 py-0.5 rounded bg-gray-100 text-gray-600">
                    {message.model}
                  </span>
                )}
              </div>
              <div className="whitespace-pre-wrap break-words">
                {message.reasoningContent && (
                  <ReasoningContent reasoningContent={message.reasoningContent} />
                )}
                {message.isImage ? (
                  <img
                    src={message.content}
                    alt="Generated image"
                    className="max-w-full rounded-md border border-gray-200"
                  />
                ) : (
                  <ReactMarkdown
                    components={{
                      code({ node, inline, className, children, ...props }: React.ComponentPropsWithoutRef<'code'> & {
                        inline?: boolean;
                        node?: any;
                      }) {
                        const match = /language-(\w+)/.exec(className || '');
                        return !inline && match ? (
                          <SyntaxHighlighter
                            style={coy}
                            language={match[1]}
                            PreTag="div"
                          >
                            {String(children).replace(/\n$/, '')}
                          </SyntaxHighlighter>
                        ) : (
                          <code className={className} {...props}>
                            {children}
                          </code>
                        );
                      },
                      pre: ({ node, ...props }) => (
                        <pre className="rounded-md overflow-x-auto" {...props} />
                      )
                    }}
                  >
                    {message.content}
                  </ReactMarkdown>
                )}
              </div>
            </div>
          ))}
          {isLoading && (
            <div className="flex justify-center items-center my-4">
              <Spin indicator={antIcon} />
            </div>
          )}