Use a single file for fetching model options

This commit is contained in:
Ishaan Jaff 2025-04-03 19:27:44 -07:00
parent 6ffe3f1e46
commit b361329e07
2 changed files with 61 additions and 23 deletions

View file

@ -23,9 +23,9 @@ import {
} from "@tremor/react"; } from "@tremor/react";
import { message, Select } from "antd"; import { message, Select } from "antd";
import { modelAvailableCall } from "./networking";
import { makeOpenAIChatCompletionRequest } from "./chat_ui/llm_calls/chat_completion"; import { makeOpenAIChatCompletionRequest } from "./chat_ui/llm_calls/chat_completion";
import { makeOpenAIImageGenerationRequest } from "./chat_ui/llm_calls/image_generation"; import { makeOpenAIImageGenerationRequest } from "./chat_ui/llm_calls/image_generation";
import { fetchAvailableModels } from "./chat_ui/llm_calls/fetch_models";
import { Prism as SyntaxHighlighter } from "react-syntax-highlighter"; import { Prism as SyntaxHighlighter } from "react-syntax-highlighter";
import { Typography } from "antd"; import { Typography } from "antd";
import { coy } from 'react-syntax-highlighter/dist/esm/styles/prism'; import { coy } from 'react-syntax-highlighter/dist/esm/styles/prism';
@ -70,33 +70,17 @@ const ChatUI: React.FC<ChatUIProps> = ({
} }
// Fetch model info and set the default selected model // Fetch model info and set the default selected model
const fetchModelInfo = async () => { const loadModels = async () => {
try { try {
const fetchedAvailableModels = await modelAvailableCall( const uniqueModels = await fetchAvailableModels(
useApiKey ?? '', // Use empty string if useApiKey is null, useApiKey,
userID, userID,
userRole userRole
); );
console.log("model_info:", fetchedAvailableModels); console.log("Fetched models:", uniqueModels);
if (fetchedAvailableModels?.data.length > 0) { if (uniqueModels.length > 0) {
// Create a Map to store unique models using the model ID as key
const uniqueModelsMap = new Map();
fetchedAvailableModels["data"].forEach((item: { id: string }) => {
uniqueModelsMap.set(item.id, {
value: item.id,
label: item.id
});
});
// Convert Map values back to array
const uniqueModels = Array.from(uniqueModelsMap.values());
// Sort models alphabetically
uniqueModels.sort((a, b) => a.label.localeCompare(b.label));
setModelInfo(uniqueModels); setModelInfo(uniqueModels);
setSelectedModel(uniqueModels[0].value); setSelectedModel(uniqueModels[0].value);
} }
@ -105,7 +89,7 @@ const ChatUI: React.FC<ChatUIProps> = ({
} }
}; };
fetchModelInfo(); loadModels();
}, [accessToken, userID, userRole, apiKeySource, apiKey]); }, [accessToken, userID, userRole, apiKeySource, apiKey]);

View file

@ -0,0 +1,54 @@
import { modelAvailableCall } from "../../networking";
/**
 * A single entry for a model-selection dropdown.
 * Both fields are set to the model's ID by `fetchAvailableModels`.
 */
interface ModelOption {
  value: string;
  label: string;
}
/**
 * Fetches the models available to the given user and formats them as
 * `{ value, label }` options for selection dropdowns.
 *
 * @param apiKey - API key used to authenticate; `null` falls back to `''`.
 * @param userID - ID of the user whose accessible models are listed.
 * @param userRole - Role of the user (passed through to the backend call).
 * @param teamID - Optional team scope; defaults to `null` (no team filter).
 * @returns De-duplicated, alphabetically sorted model options; `[]` when the
 *          backend returns no models (or no `data` field).
 * @throws Re-throws any error raised by `modelAvailableCall`.
 */
export const fetchAvailableModels = async (
  apiKey: string | null,
  userID: string,
  userRole: string,
  teamID: string | null = null
): Promise<ModelOption[]> => {
  try {
    const fetchedAvailableModels = await modelAvailableCall(
      apiKey ?? '', // Use empty string if apiKey is null
      userID,
      userRole,
      false,
      teamID
    );

    console.log("model_info:", fetchedAvailableModels);

    // Guard BOTH the response object and its `data` field: the previous
    // `fetchedAvailableModels?.data.length` threw a TypeError whenever the
    // response existed but `data` was missing.
    if (!fetchedAvailableModels?.data?.length) {
      return [];
    }

    // De-duplicate by model ID (value and label are both the ID, so a Set
    // of IDs replaces the earlier Map-of-objects), then sort for the UI.
    const uniqueIds: string[] = [
      ...new Set<string>(
        fetchedAvailableModels.data.map((item: { id: string }) => item.id)
      ),
    ];
    uniqueIds.sort((a, b) => a.localeCompare(b));

    return uniqueIds.map((id) => ({ value: id, label: id }));
  } catch (error) {
    console.error("Error fetching model info:", error);
    throw error; // surface to the caller so the UI can display a message
  }
};