add makeOpenAIResponsesRequest

This commit is contained in:
Ishaan Jaff 2025-04-19 09:57:31 -07:00
parent 88b6cb6c28
commit dc67f4bc18
2 changed files with 19 additions and 1 deletion

View file

@@ -26,6 +26,7 @@ import {
import { message, Select, Spin, Typography, Tooltip, Input } from "antd";
import { makeOpenAIChatCompletionRequest } from "./chat_ui/llm_calls/chat_completion";
import { makeOpenAIImageGenerationRequest } from "./chat_ui/llm_calls/image_generation";
import { makeOpenAIResponsesRequest } from "./chat_ui/llm_calls/responses_api";
import { fetchAvailableModels, ModelGroup } from "./chat_ui/llm_calls/fetch_models";
import { litellmModeMapping, ModelMode, EndpointType, getEndpointType } from "./chat_ui/mode_endpoint_mapping";
import { Prism as SyntaxHighlighter } from "react-syntax-highlighter";
@@ -297,7 +298,6 @@ const ChatUI: React.FC<ChatUIProps> = ({
try {
if (selectedModel) {
// Use EndpointType enum for comparison
if (endpointType === EndpointType.CHAT) {
// Create chat history for API call - strip out model field and isImage field
const apiChatHistory = [...chatHistory.filter(msg => !msg.isImage).map(({ role, content }) => ({ role, content })), newUserMessage];
@@ -323,6 +323,21 @@ const ChatUI: React.FC<ChatUIProps> = ({
selectedTags,
signal
);
} else if (endpointType === EndpointType.RESPONSES) {
// Create chat history for API call - strip out model field and isImage field
const apiChatHistory = [...chatHistory.filter(msg => !msg.isImage).map(({ role, content }) => ({ role, content })), newUserMessage];
await makeOpenAIResponsesRequest(
apiChatHistory,
(chunk, model) => updateTextUI("assistant", chunk, model),
selectedModel,
effectiveApiKey,
selectedTags,
signal,
updateReasoningContent,
updateTimingData,
updateUsageData
);
}
}
} catch (error) {

View file

@@ -4,6 +4,7 @@
// Model operating modes as reported by the proxy's model metadata
// (e.g. litellm `mode` field). Values are the wire-format strings.
export enum ModelMode {
IMAGE_GENERATION = "image_generation",
CHAT = "chat",
RESPONSES = "responses",
// add additional modes as needed
}
@@ -11,6 +12,7 @@ export enum ModelMode
// UI-side endpoint families the chat playground can call.
// Each ModelMode is mapped onto one of these via `litellmModeMapping`.
export enum EndpointType {
IMAGE = "image",
CHAT = "chat",
RESPONSES = "responses",
// add additional endpoint types if required
}
@@ -18,6 +20,7 @@ export enum ModelMode
// Maps each model mode to the endpoint type used when issuing the request.
// The Record<ModelMode, EndpointType> annotation makes the compiler enforce
// that every ModelMode member has an entry, so adding a new mode without a
// mapping is a compile-time error.
export const litellmModeMapping: Record<ModelMode, EndpointType> = {
[ModelMode.IMAGE_GENERATION]: EndpointType.IMAGE,
[ModelMode.CHAT]: EndpointType.CHAT,
[ModelMode.RESPONSES]: EndpointType.RESPONSES,
};
export const getEndpointType = (mode: string): EndpointType => {