Mirror of https://github.com/BerriAI/litellm.git — synced 2025-04-27 03:34:10 +00:00
use 1 file for fetch model options
parent 6ffe3f1e46 · commit b361329e07
2 changed files with 61 additions and 23 deletions
ChatUI component (presumably chat_ui.tsx, alongside the chat_ui/llm_calls/ helpers it imports):

@@ -23,9 +23,9 @@ import {
 } from "@tremor/react";
 import { message, Select } from "antd";
-import { modelAvailableCall } from "./networking";
 import { makeOpenAIChatCompletionRequest } from "./chat_ui/llm_calls/chat_completion";
 import { makeOpenAIImageGenerationRequest } from "./chat_ui/llm_calls/image_generation";
+import { fetchAvailableModels } from "./chat_ui/llm_calls/fetch_models";
 import { Prism as SyntaxHighlighter } from "react-syntax-highlighter";
 import { Typography } from "antd";
 import { coy } from 'react-syntax-highlighter/dist/esm/styles/prism';
@@ -70,33 +70,17 @@ const ChatUI: React.FC<ChatUIProps> = ({
     }

     // Fetch model info and set the default selected model
-    const fetchModelInfo = async () => {
+    const loadModels = async () => {
       try {
-        const fetchedAvailableModels = await modelAvailableCall(
-          useApiKey ?? '', // Use empty string if useApiKey is null,
+        const uniqueModels = await fetchAvailableModels(
+          useApiKey,
           userID,
           userRole
         );

-        console.log("model_info:", fetchedAvailableModels);
-
-        if (fetchedAvailableModels?.data.length > 0) {
-          // Create a Map to store unique models using the model ID as key
-          const uniqueModelsMap = new Map();
-
-          fetchedAvailableModels["data"].forEach((item: { id: string }) => {
-            uniqueModelsMap.set(item.id, {
-              value: item.id,
-              label: item.id
-            });
-          });
-
-          // Convert Map values back to array
-          const uniqueModels = Array.from(uniqueModelsMap.values());
-
-          // Sort models alphabetically
-          uniqueModels.sort((a, b) => a.label.localeCompare(b.label));
+        console.log("Fetched models:", uniqueModels);

         if (uniqueModels.length > 0) {
           setModelInfo(uniqueModels);
           setSelectedModel(uniqueModels[0].value);
         }
@ -105,7 +89,7 @@ const ChatUI: React.FC<ChatUIProps> = ({
|
|||
}
|
||||
};
|
||||
|
||||
fetchModelInfo();
|
||||
loadModels();
|
||||
}, [accessToken, userID, userRole, apiKeySource, apiKey]);
|
||||
|
||||
|
||||
|
|
|
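Reassembled for readability, the effect inside the ChatUI component now reads roughly as follows. This is a sketch stitched together from the hunks above rather than the full file: the useEffect wrapper, setModelInfo, setSelectedModel, useApiKey, and the dependency list all appear in the diff, while the catch handling shown here is assumed.

// Sketch reassembled from the hunks above; the catch body is an assumption.
useEffect(() => {
  // Fetch model info and set the default selected model
  const loadModels = async () => {
    try {
      // De-duplication and sorting now live in fetchAvailableModels
      const uniqueModels = await fetchAvailableModels(useApiKey, userID, userRole);
      console.log("Fetched models:", uniqueModels);

      if (uniqueModels.length > 0) {
        setModelInfo(uniqueModels);
        setSelectedModel(uniqueModels[0].value);
      }
    } catch (error) {
      console.error("Error fetching model info:", error); // assumed handler
    }
  };

  loadModels();
}, [accessToken, userID, userRole, apiKeySource, apiKey]);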
New file — the fetch_models module imported above (presumably chat_ui/llm_calls/fetch_models.ts):

@@ -0,0 +1,54 @@
import { modelAvailableCall } from "../../networking";

interface ModelOption {
  value: string;
  label: string;
}

/**
 * Fetches available models for the user and formats them as options
 * for selection dropdowns
 */
export const fetchAvailableModels = async (
  apiKey: string | null,
  userID: string,
  userRole: string,
  teamID: string | null = null
): Promise<ModelOption[]> => {
  try {
    const fetchedAvailableModels = await modelAvailableCall(
      apiKey ?? '', // Use empty string if apiKey is null
      userID,
      userRole,
      false,
      teamID
    );

    console.log("model_info:", fetchedAvailableModels);

    if (fetchedAvailableModels?.data.length > 0) {
      // Create a Map to store unique models using the model ID as key
      const uniqueModelsMap = new Map();

      fetchedAvailableModels["data"].forEach((item: { id: string }) => {
        uniqueModelsMap.set(item.id, {
          value: item.id,
          label: item.id
        });
      });

      // Convert Map values back to array
      const uniqueModels = Array.from(uniqueModelsMap.values());

      // Sort models alphabetically
      uniqueModels.sort((a, b) => a.label.localeCompare(b.label));

      return uniqueModels;
    }

    return [];
  } catch (error) {
    console.error("Error fetching model info:", error);
    throw error;
  }
};
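Because the helper is now shared, other dashboard views can reuse it directly. Below is a usage sketch with a hypothetical ModelPicker component and the antd Select already used by the dashboard; only fetchAvailableModels, its argument order, and the { value, label } option shape come from this commit.

import { useEffect, useState } from "react";
import { Select } from "antd";
import { fetchAvailableModels } from "./chat_ui/llm_calls/fetch_models";

interface ModelPickerProps {
  apiKey: string | null;
  userID: string;
  userRole: string;
}

// Hypothetical consumer: populates a model dropdown via the shared helper.
export function ModelPicker({ apiKey, userID, userRole }: ModelPickerProps) {
  const [options, setOptions] = useState<{ value: string; label: string }[]>([]);

  useEffect(() => {
    // Options arrive de-duplicated and sorted by fetchAvailableModels
    fetchAvailableModels(apiKey, userID, userRole)
      .then(setOptions)
      .catch((err) => console.error("Failed to load models:", err));
  }, [apiKey, userID, userRole]);

  return <Select options={options} placeholder="Select a model" style={{ width: 300 }} />;
}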