diff --git a/ui/litellm-dashboard/src/components/chat_ui.tsx b/ui/litellm-dashboard/src/components/chat_ui.tsx
index 0231f52325..8a48608bde 100644
--- a/ui/litellm-dashboard/src/components/chat_ui.tsx
+++ b/ui/litellm-dashboard/src/components/chat_ui.tsx
@@ -20,18 +20,28 @@ import {
   SelectItem,
   TextInput,
   Button,
+  Divider,
 } from "@tremor/react";
-import { message, Select } from "antd";
+import { message, Select, Spin, Typography, Tooltip } from "antd";
 import { makeOpenAIChatCompletionRequest } from "./chat_ui/llm_calls/chat_completion";
 import { makeOpenAIImageGenerationRequest } from "./chat_ui/llm_calls/image_generation";
 import { fetchAvailableModels, ModelGroup } from "./chat_ui/llm_calls/fetch_models";
 import { litellmModeMapping, ModelMode, EndpointType, getEndpointType } from "./chat_ui/mode_endpoint_mapping";
 import { Prism as SyntaxHighlighter } from "react-syntax-highlighter";
-import { Typography } from "antd";
 import { coy } from 'react-syntax-highlighter/dist/esm/styles/prism';
 import EndpointSelector from "./chat_ui/EndpointSelector";
 import { determineEndpointType } from "./chat_ui/EndpointUtils";
+import {
+  SendOutlined,
+  ApiOutlined,
+  KeyOutlined,
+  ClearOutlined,
+  RobotOutlined,
+  UserOutlined,
+  DeleteOutlined,
+  LoadingOutlined
+} from "@ant-design/icons";
 
 interface ChatUIProps {
   accessToken: string | null;
@@ -61,6 +71,8 @@ const ChatUI: React.FC = ({
   const [modelInfo, setModelInfo] = useState([]);
   const customModelTimeout = useRef(null);
   const [endpointType, setEndpointType] = useState(EndpointType.CHAT);
+  const [isLoading, setIsLoading] = useState(false);
+  const abortControllerRef = useRef(null);
 
   const chatEndRef = useRef(null);
 
@@ -141,6 +153,15 @@ const ChatUI: React.FC = ({
     }
   };
 
+  const handleCancelRequest = () => {
+    if (abortControllerRef.current) {
+      abortControllerRef.current.abort();
+      abortControllerRef.current = null;
+      setIsLoading(false);
+      message.info("Request cancelled");
+    }
+  };
+
   const handleSendMessage = async () => {
     if (inputMessage.trim() === "") return;
 
@@ -155,11 +176,16 @@ const ChatUI: React.FC = ({
       return;
     }
 
+    // Create new abort controller for this request
+    abortControllerRef.current = new AbortController();
+    const signal = abortControllerRef.current.signal;
+
     // Create message object without model field for API call
     const newUserMessage = { role: "user", content: inputMessage };
 
     // Update UI with full message object
     setChatHistory([...chatHistory, newUserMessage]);
+    setIsLoading(true);
 
     try {
       if (selectedModel) {
@@ -172,7 +198,8 @@ const ChatUI: React.FC = ({
             apiChatHistory,
             (chunk, model) => updateTextUI("assistant", chunk, model),
             selectedModel,
-            effectiveApiKey
+            effectiveApiKey,
+            signal
           );
         } else if (endpointType === EndpointType.IMAGE) {
           // For image generation
@@ -180,13 +207,21 @@ const ChatUI: React.FC = ({
             inputMessage,
             (imageUrl, model) => updateImageUI(imageUrl, model),
             selectedModel,
-            effectiveApiKey
+            effectiveApiKey,
+            signal
           );
         }
       }
     } catch (error) {
-      console.error("Error fetching response", error);
-      updateTextUI("assistant", "Error fetching response");
+      if (signal.aborted) {
+        console.log("Request was cancelled");
+      } else {
+        console.error("Error fetching response", error);
+        updateTextUI("assistant", "Error fetching response");
+      }
+    } finally {
+      setIsLoading(false);
+      abortControllerRef.current = null;
     }
 
     setInputMessage("");
@@ -224,128 +259,144 @@ const ChatUI: React.FC = ({
     setEndpointType(value);
   };
 
+  const antIcon = <LoadingOutlined style={{ fontSize: 24 }} spin />;
+
   return (
-
-
+
+
+
           {/* Left Sidebar with Controls */}
-
+
-                API Key Source
-                 setApiKeySource(value as "session" | "custom")}
+                  options={[
+                    { value: 'session', label: 'Current UI Session' },
+                    { value: 'custom', label: 'Virtual Key' },
+                  ]}
+                  className="rounded-md"
+                />
+                {apiKeySource === 'custom' && (
+
+                )}
+
+
+
+
+                Select Model
+
+
                  ({
-                    value: option.model_group,
-                    label: option.model_group
-                  })),
-                  { value: 'custom', label: 'Enter custom model' }
-                ]}
-                style={{ width: "100%" }}
-                showSearch={true}
-              />
-              {showCustomModelInput && (
-                 {
-                  // Using setTimeout to create a simple debounce effect
-                  if (customModelTimeout.current) {
-                    clearTimeout(customModelTimeout.current);
-                  }
-
-                  customModelTimeout.current = setTimeout(() => {
-                    setSelectedModel(value);
-                  }, 500); // 500ms delay after typing stops
-                }}
-              />
-              )}
-
-
-
-
-
-
-
           {/* Main Chat Area */}
-
-
+
+
+              {chatHistory.length === 0 && (
+
+
+                  Start a conversation or generate an image
+
+              )}
+              {chatHistory.map((message, index) => (
-
-
-                    {message.role}
+
+
+                      {message.role === "user" ?
+                        :
+
+                      }
+
+                      {message.role}
                       {message.role === "assistant" && message.model && (
-
+
                          {message.model}
                       )}
-
+
                   {message.isImage ? (
                       Generated image
                   ) : (
 = ({
                           style={coy as any}
                           language={match[1]}
                           PreTag="div"
+                          className="rounded-md my-2"
                           {...props}
                         >
                           {String(children).replace(/\n$/, '')}
                     ) : (
-
+
                         {children}
                   );
@@ -379,11 +431,16 @@ const ChatUI: React.FC = ({
               ))}
+              {isLoading && (
+
+
+              )}
-
-
+
+
 = ({
                     ? "Type your message..."
                     : "Describe the image you want to generate..."
                 }
+                disabled={isLoading}
+                className="flex-1"
               />
-
+              {isLoading ? (
+
+              ) : (
+
+              )}
+
   );
 };
diff --git a/ui/litellm-dashboard/src/components/chat_ui/EndpointSelector.tsx b/ui/litellm-dashboard/src/components/chat_ui/EndpointSelector.tsx
index a0f23da98d..49b1df3e97 100644
--- a/ui/litellm-dashboard/src/components/chat_ui/EndpointSelector.tsx
+++ b/ui/litellm-dashboard/src/components/chat_ui/EndpointSelector.tsx
@@ -28,9 +28,10 @@ const EndpointSelector: React.FC = ({
       Endpoint Type:
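The chat_ui.tsx hunks above thread a new trailing `signal` argument into `makeOpenAIChatCompletionRequest` and `makeOpenAIImageGenerationRequest`, but the matching changes inside those helpers are not part of this excerpt. Below is a minimal sketch of how the chat-completion helper could accept and forward the `AbortSignal`. Only the parameter order is taken from the call site in the diff; the `openai` client wiring, the `baseURL`, and everything else inside the function body are assumptions, not code from this PR.

```ts
// llm_calls/chat_completion.ts -- illustrative sketch only.
// Signature inferred from the call site in chat_ui.tsx; the OpenAI client
// setup below is an assumption, not the PR's actual implementation.
import OpenAI from "openai";

export async function makeOpenAIChatCompletionRequest(
  chatHistory: { role: string; content: string }[],
  updateUI: (chunk: string, model?: string) => void,
  selectedModel: string,
  accessToken: string,
  signal?: AbortSignal // new: lets the caller cancel an in-flight request
): Promise<void> {
  const client = new OpenAI({
    apiKey: accessToken,             // virtual key or session key entered in the UI
    baseURL: window.location.origin, // assumption: requests are routed through the LiteLLM proxy
    dangerouslyAllowBrowser: true,
  });

  // Forward the AbortSignal as a per-request option; calling abort() on the
  // controller rejects this promise and stops the stream.
  const stream = await client.chat.completions.create(
    {
      model: selectedModel,
      messages: chatHistory as OpenAI.Chat.ChatCompletionMessageParam[],
      stream: true,
    },
    { signal }
  );

  for await (const chunk of stream) {
    const delta = chunk.choices[0]?.delta?.content;
    if (delta) {
      updateUI(delta, chunk.model);
    }
  }
}
```

With the helper wired this way, `handleCancelRequest` in the diff aborts the controller, the pending `create` call rejects, and the `catch` block distinguishes a user cancellation from a real failure via `signal.aborted`. The image-generation helper would accept the same optional `signal` parameter and forward it in the same manner.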