mirror of https://github.com/BerriAI/litellm.git
(ui) select a model to chat with
parent 9d463e1d98
commit 3de9f5e860

1 changed file with 107 additions and 76 deletions
@@ -3,6 +3,8 @@ import { Card, Title, Table, TableHead, TableRow, TableCell, TableBody, Grid } f
 import { modelInfoCall } from "./networking";
 import openai from "openai";
 
+
+
 interface ChatUIProps {
   accessToken: string | null;
   token: string | null;
@@ -10,15 +12,15 @@ interface ChatUIProps {
   userID: string | null;
 }
 
-async function generateModelResponse(inputMessage: string, updateUI: (chunk: string) => void) {
+async function generateModelResponse(inputMessage: string, updateUI: (chunk: string) => void, selectedModel: string, accessToken: string) {
   const client = new openai.OpenAI({
-    apiKey: 'sk-1234', // Replace with your OpenAI API key
+    apiKey: accessToken, // Replace with your OpenAI API key
     baseURL: 'http://0.0.0.0:4000', // Replace with your OpenAI API base URL
     dangerouslyAllowBrowser: true, // using a temporary litellm proxy key
   });
 
   const response = await client.chat.completions.create({
-    model: 'azure-gpt-3.5',
+    model: selectedModel,
     stream: true,
     messages: [
       {
@@ -35,23 +37,38 @@ async function generateModelResponse(inputMessage: string, updateUI: (chunk: str
     }
   }
 }
 
 const ChatUI: React.FC<ChatUIProps> = ({ accessToken, token, userRole, userID }) => {
   const [inputMessage, setInputMessage] = useState("");
   const [chatHistory, setChatHistory] = useState<any[]>([]);
+  const [selectedModel, setSelectedModel] = useState<string | undefined>(undefined);
+  const [modelInfo, setModelInfo] = useState<any | null>(null); // Declare modelInfo at the component level
+
+  useEffect(() => {
+    // Fetch model info and set the default selected model
+    const fetchModelInfo = async () => {
+      const fetchedModelInfo = await modelInfoCall(accessToken, userID, userRole);
+      console.log("model_info:", fetchedModelInfo);
+
+      if (fetchedModelInfo?.data.length > 0) {
+        setModelInfo(fetchedModelInfo);
+        setSelectedModel(fetchedModelInfo.data[0].model_name);
+      }
+    };
+
+    fetchModelInfo();
+  }, [accessToken, userID, userRole]);
+
   const updateUI = (role: string, chunk: string) => {
     setChatHistory((prevHistory) => {
       const lastMessage = prevHistory[prevHistory.length - 1];
 
-      // Check if the last message is from the same role
       if (lastMessage && lastMessage.role === role) {
-        // Concatenate the new chunk to the existing message
         return [
           ...prevHistory.slice(0, prevHistory.length - 1),
           { role, content: lastMessage.content + chunk },
         ];
       } else {
-        // Append a new message if the last message is not from the same role
         return [...prevHistory, { role, content: chunk }];
       }
     });
@@ -60,14 +77,15 @@ const ChatUI: React.FC<ChatUIProps> = ({ accessToken, token, userRole, userID })
   const handleSendMessage = async () => {
     if (inputMessage.trim() === "") return;
 
-    // Add the user's message to the chat history
     setChatHistory((prevHistory) => [
       ...prevHistory,
       { role: "user", content: inputMessage },
    ]);
 
     try {
-      await generateModelResponse(inputMessage, (chunk) => updateUI("assistant", chunk));
+      if (selectedModel) {
+        await generateModelResponse(inputMessage, (chunk) => updateUI("assistant", chunk), selectedModel, accessToken);
+      }
     } catch (error) {
       console.error("Error fetching model response", error);
       updateUI("assistant", "Error fetching model response");
@@ -76,11 +94,24 @@ const ChatUI: React.FC<ChatUIProps> = ({ accessToken, token, userRole, userID })
     setInputMessage("");
   };
 
-
   return (
     <div style={{ width: "100%", position: "relative" }}>
       <Grid className="gap-2 p-10 h-[75vh] w-full">
         <Card>
+          <div>
+            <label>Select Model:</label>
+            <select
+              value={selectedModel || ""}
+              onChange={(e) => setSelectedModel(e.target.value)}
+            >
+              {/* Populate dropdown options from available models */}
+              {modelInfo?.data.map((element: { model_name: string }) => (
+                <option key={element.model_name} value={element.model_name}>
+                  {element.model_name}
+                </option>
+              ))}
+            </select>
+          </div>
           <Table className="mt-5" style={{ display: "block", maxHeight: "60vh", overflowY: "auto" }}>
             <TableHead>
               <TableRow>
@@ -119,4 +150,4 @@ const ChatUI: React.FC<ChatUIProps> = ({ accessToken, token, userRole, userID })
 
 
 
 export default ChatUI;
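
For context: the loop that consumes the streamed response sits between the second and third hunks above and is not part of this diff. A minimal sketch of what such a loop does, assuming the openai v4 client (where a completion created with stream: true is an async iterable of chunks); pipeStreamToUI is a hypothetical helper name, not from the commit:

import OpenAI from "openai";

// Sketch only: read a streamed chat completion and forward each text
// delta to the UI callback, matching the updateUI signature in the diff.
async function pipeStreamToUI(
  response: AsyncIterable<OpenAI.Chat.Completions.ChatCompletionChunk>,
  updateUI: (chunk: string) => void
) {
  for await (const chunk of response) {
    // Each chunk carries an incremental delta; forward any text content.
    const content = chunk.choices[0]?.delta?.content;
    if (content) {
      updateUI(content);
    }
  }
}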
|
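
The new useEffect and dropdown rely on only two facts about the modelInfoCall payload: data is a non-empty array and each element has a model_name. A sketch of that assumed shape (ModelInfoResponse and pickDefaultModel are hypothetical names; real payloads carry more fields):

// Assumed response shape, inferred from the fields the new code reads
// (`data.length`, `data[0].model_name`).
interface ModelInfoResponse {
  data: Array<{ model_name: string }>;
}

// Mirrors the default-selection logic in the useEffect above.
function pickDefaultModel(info: ModelInfoResponse): string | undefined {
  return info.data.length > 0 ? info.data[0].model_name : undefined;
}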