Mirror of https://github.com/meta-llama/llama-stack.git, synced 2025-10-04 04:04:14 +00:00
feat(UI): Implementing File Upload and VectorDB Creation/Configuration in Playground (#3266)
Some checks failed
Integration Tests (Replay) / Integration Tests (, , , client=, vision=) (push) Failing after 2s
Test External Providers Installed via Module / test-external-providers-from-module (venv) (push) Has been skipped
Python Package Build Test / build (3.13) (push) Failing after 1s
Integration Auth Tests / test-matrix (oauth2_token) (push) Failing after 5s
Pre-commit / pre-commit (push) Failing after 3s
Unit Tests / unit-tests (3.12) (push) Failing after 1s
Vector IO Integration Tests / test-matrix (push) Failing after 5s
Test External API and Providers / test-external (venv) (push) Failing after 4s
Python Package Build Test / build (3.12) (push) Failing after 5s
Update ReadTheDocs / update-readthedocs (push) Failing after 2s
Unit Tests / unit-tests (3.13) (push) Failing after 5s
UI Tests / ui-tests (22) (push) Failing after 6s
SqlStore Integration Tests / test-postgres (3.12) (push) Failing after 12s
SqlStore Integration Tests / test-postgres (3.13) (push) Failing after 13s
This commit is contained in:
parent 1a9fa3c0b8
commit 75fad445a6
8 changed files with 1953 additions and 201 deletions
610
llama_stack/ui/app/chat-playground/chunk-processor.test.tsx
Normal file
@@ -0,0 +1,610 @@
import { describe, test, expect } from "@jest/globals";

// Extract the exact processChunk function implementation for testing
function createProcessChunk() {
  return (chunk: unknown): { text: string | null; isToolCall: boolean } => {
    const chunkObj = chunk as Record<string, unknown>;

    // Helper function to check if content contains function call JSON
    const containsToolCall = (content: string): boolean => {
      return (
        content.includes('"type": "function"') ||
        content.includes('"name": "knowledge_search"') ||
        content.includes('"parameters":') ||
        !!content.match(/\{"type":\s*"function".*?\}/)
      );
    };

    // Check if this chunk contains a tool call (function call)
    let isToolCall = false;

    // Check direct chunk content if it's a string
    if (typeof chunk === "string") {
      isToolCall = containsToolCall(chunk);
    }

    // Check delta structures
    if (
      chunkObj?.delta &&
      typeof chunkObj.delta === "object" &&
      chunkObj.delta !== null
    ) {
      const delta = chunkObj.delta as Record<string, unknown>;
      if ("tool_calls" in delta) {
        isToolCall = true;
      }
      if (typeof delta.text === "string") {
        if (containsToolCall(delta.text)) {
          isToolCall = true;
        }
      }
    }

    // Check event structures
    if (
      chunkObj?.event &&
      typeof chunkObj.event === "object" &&
      chunkObj.event !== null
    ) {
      const event = chunkObj.event as Record<string, unknown>;

      // Check event payload
      if (
        event?.payload &&
        typeof event.payload === "object" &&
        event.payload !== null
      ) {
        const payload = event.payload as Record<string, unknown>;
        if (typeof payload.content === "string") {
          if (containsToolCall(payload.content)) {
            isToolCall = true;
          }
        }

        // Check payload delta
        if (
          payload?.delta &&
          typeof payload.delta === "object" &&
          payload.delta !== null
        ) {
          const delta = payload.delta as Record<string, unknown>;
          if (typeof delta.text === "string") {
            if (containsToolCall(delta.text)) {
              isToolCall = true;
            }
          }
        }
      }

      // Check event delta
      if (
        event?.delta &&
        typeof event.delta === "object" &&
        event.delta !== null
      ) {
        const delta = event.delta as Record<string, unknown>;
        if (typeof delta.text === "string") {
          if (containsToolCall(delta.text)) {
            isToolCall = true;
          }
        }
        if (typeof delta.content === "string") {
          if (containsToolCall(delta.content)) {
            isToolCall = true;
          }
        }
      }
    }

    // if it's a tool call, skip it (don't display in chat)
    if (isToolCall) {
      return { text: null, isToolCall: true };
    }

    // Extract text content from various chunk formats
    let text: string | null = null;

    // Helper function to extract clean text content, filtering out function calls
    const extractCleanText = (content: string): string | null => {
      if (containsToolCall(content)) {
        try {
          // Try to parse and extract non-function call parts
          const jsonMatch = content.match(
            /\{"type":\s*"function"[^}]*\}[^}]*\}/
          );
          if (jsonMatch) {
            const jsonPart = jsonMatch[0];
            const parsedJson = JSON.parse(jsonPart);

            // If it's a function call, extract text after JSON
            if (parsedJson.type === "function") {
              const textAfterJson = content
                .substring(content.indexOf(jsonPart) + jsonPart.length)
                .trim();
              return textAfterJson || null;
            }
          }
          // If we can't parse it properly, skip the whole thing
          return null;
        } catch {
          return null;
        }
      }
      return content;
    };

    // Try direct delta text
    if (
      chunkObj?.delta &&
      typeof chunkObj.delta === "object" &&
      chunkObj.delta !== null
    ) {
      const delta = chunkObj.delta as Record<string, unknown>;
      if (typeof delta.text === "string") {
        text = extractCleanText(delta.text);
      }
    }

    // Try event structures
    if (
      !text &&
      chunkObj?.event &&
      typeof chunkObj.event === "object" &&
      chunkObj.event !== null
    ) {
      const event = chunkObj.event as Record<string, unknown>;

      // Try event payload content
      if (
        event?.payload &&
        typeof event.payload === "object" &&
        event.payload !== null
      ) {
        const payload = event.payload as Record<string, unknown>;

        // Try direct payload content
        if (typeof payload.content === "string") {
          text = extractCleanText(payload.content);
        }

        // Try turn_complete event structure: payload.turn.output_message.content
        if (
          !text &&
          payload?.turn &&
          typeof payload.turn === "object" &&
          payload.turn !== null
        ) {
          const turn = payload.turn as Record<string, unknown>;
          if (
            turn?.output_message &&
            typeof turn.output_message === "object" &&
            turn.output_message !== null
          ) {
            const outputMessage = turn.output_message as Record<
              string,
              unknown
            >;
            if (typeof outputMessage.content === "string") {
              text = extractCleanText(outputMessage.content);
            }
          }

          // Fallback to model_response in steps if no output_message
          if (
            !text &&
            turn?.steps &&
            Array.isArray(turn.steps) &&
            turn.steps.length > 0
          ) {
            for (const step of turn.steps) {
              if (step && typeof step === "object" && step !== null) {
                const stepObj = step as Record<string, unknown>;
                if (
                  stepObj?.model_response &&
                  typeof stepObj.model_response === "object" &&
                  stepObj.model_response !== null
                ) {
                  const modelResponse = stepObj.model_response as Record<
                    string,
                    unknown
                  >;
                  if (typeof modelResponse.content === "string") {
                    text = extractCleanText(modelResponse.content);
                    break;
                  }
                }
              }
            }
          }
        }

        // Try payload delta
        if (
          !text &&
          payload?.delta &&
          typeof payload.delta === "object" &&
          payload.delta !== null
        ) {
          const delta = payload.delta as Record<string, unknown>;
          if (typeof delta.text === "string") {
            text = extractCleanText(delta.text);
          }
        }
      }

      // Try event delta
      if (
        !text &&
        event?.delta &&
        typeof event.delta === "object" &&
        event.delta !== null
      ) {
        const delta = event.delta as Record<string, unknown>;
        if (typeof delta.text === "string") {
          text = extractCleanText(delta.text);
        }
        if (!text && typeof delta.content === "string") {
          text = extractCleanText(delta.content);
        }
      }
    }

    // Try choices structure (ChatML format)
    if (
      !text &&
      chunkObj?.choices &&
      Array.isArray(chunkObj.choices) &&
      chunkObj.choices.length > 0
    ) {
      const choice = chunkObj.choices[0] as Record<string, unknown>;
      if (
        choice?.delta &&
        typeof choice.delta === "object" &&
        choice.delta !== null
      ) {
        const delta = choice.delta as Record<string, unknown>;
        if (typeof delta.content === "string") {
          text = extractCleanText(delta.content);
        }
      }
    }

    // Try direct string content
    if (!text && typeof chunk === "string") {
      text = extractCleanText(chunk);
    }

    return { text, isToolCall: false };
  };
}

describe("Chunk Processor", () => {
  const processChunk = createProcessChunk();

  describe("Real Event Structures", () => {
    test("handles turn_complete event with cancellation policy response", () => {
      const chunk = {
        event: {
          payload: {
            event_type: "turn_complete",
            turn: {
              turn_id: "50a2d6b7-49ed-4d1e-b1c2-6d68b3f726db",
              session_id: "e7f62b8e-518c-4450-82df-e65fe49f27a3",
              input_messages: [
                {
                  role: "user",
                  content: "nice, what's the cancellation policy?",
                  context: null,
                },
              ],
              steps: [
                {
                  turn_id: "50a2d6b7-49ed-4d1e-b1c2-6d68b3f726db",
                  step_id: "54074310-af42-414c-9ffe-fba5b2ead0ad",
                  started_at: "2025-08-27T18:15:25.870703Z",
                  completed_at: "2025-08-27T18:15:51.288993Z",
                  step_type: "inference",
                  model_response: {
                    role: "assistant",
                    content:
                      "According to the search results, the cancellation policy for Red Hat Summit is as follows:\n\n* Cancellations must be received by 5 PM EDT on April 18, 2025 for a 50% refund of the registration fee.\n* No refunds will be given for cancellations received after 5 PM EDT on April 18, 2025.\n* Cancellation of travel reservations and hotel reservations are the responsibility of the registrant.",
                    stop_reason: "end_of_turn",
                    tool_calls: [],
                  },
                },
              ],
              output_message: {
                role: "assistant",
                content:
                  "According to the search results, the cancellation policy for Red Hat Summit is as follows:\n\n* Cancellations must be received by 5 PM EDT on April 18, 2025 for a 50% refund of the registration fee.\n* No refunds will be given for cancellations received after 5 PM EDT on April 18, 2025.\n* Cancellation of travel reservations and hotel reservations are the responsibility of the registrant.",
                stop_reason: "end_of_turn",
                tool_calls: [],
              },
              output_attachments: [],
              started_at: "2025-08-27T18:15:25.868548Z",
              completed_at: "2025-08-27T18:15:51.289262Z",
            },
          },
        },
      };

      const result = processChunk(chunk);
      expect(result.isToolCall).toBe(false);
      expect(result.text).toContain(
        "According to the search results, the cancellation policy for Red Hat Summit is as follows:"
      );
      expect(result.text).toContain("5 PM EDT on April 18, 2025");
    });

    test("handles turn_complete event with address response", () => {
      const chunk = {
        event: {
          payload: {
            event_type: "turn_complete",
            turn: {
              turn_id: "2f4a1520-8ecc-4cb7-bb7b-886939e042b0",
              session_id: "e7f62b8e-518c-4450-82df-e65fe49f27a3",
              input_messages: [
                {
                  role: "user",
                  content: "what's francisco's address",
                  context: null,
                },
              ],
              steps: [
                {
                  turn_id: "2f4a1520-8ecc-4cb7-bb7b-886939e042b0",
                  step_id: "c13dd277-1acb-4419-8fbf-d5e2f45392ea",
                  started_at: "2025-08-27T18:14:52.558761Z",
                  completed_at: "2025-08-27T18:15:11.306032Z",
                  step_type: "inference",
                  model_response: {
                    role: "assistant",
                    content:
                      "Francisco Arceo's address is:\n\nRed Hat\nUnited States\n17 Primrose Ln \nBasking Ridge New Jersey 07920",
                    stop_reason: "end_of_turn",
                    tool_calls: [],
                  },
                },
              ],
              output_message: {
                role: "assistant",
                content:
                  "Francisco Arceo's address is:\n\nRed Hat\nUnited States\n17 Primrose Ln \nBasking Ridge New Jersey 07920",
                stop_reason: "end_of_turn",
                tool_calls: [],
              },
              output_attachments: [],
              started_at: "2025-08-27T18:14:52.553707Z",
              completed_at: "2025-08-27T18:15:11.306729Z",
            },
          },
        },
      };

      const result = processChunk(chunk);
      expect(result.isToolCall).toBe(false);
      expect(result.text).toContain("Francisco Arceo's address is:");
      expect(result.text).toContain("17 Primrose Ln");
      expect(result.text).toContain("Basking Ridge New Jersey 07920");
    });

    test("handles turn_complete event with ticket cost response", () => {
      const chunk = {
        event: {
          payload: {
            event_type: "turn_complete",
            turn: {
              turn_id: "7ef244a3-efee-42ca-a9c8-942865251002",
              session_id: "e7f62b8e-518c-4450-82df-e65fe49f27a3",
              input_messages: [
                {
                  role: "user",
                  content: "what was the ticket cost for summit?",
                  context: null,
                },
              ],
              steps: [
                {
                  turn_id: "7ef244a3-efee-42ca-a9c8-942865251002",
                  step_id: "7651dda0-315a-472d-b1c1-3c2725f55bc5",
                  started_at: "2025-08-27T18:14:21.710611Z",
                  completed_at: "2025-08-27T18:14:39.706452Z",
                  step_type: "inference",
                  model_response: {
                    role: "assistant",
                    content:
                      "The ticket cost for the Red Hat Summit was $999.00 for a conference pass.",
                    stop_reason: "end_of_turn",
                    tool_calls: [],
                  },
                },
              ],
              output_message: {
                role: "assistant",
                content:
                  "The ticket cost for the Red Hat Summit was $999.00 for a conference pass.",
                stop_reason: "end_of_turn",
                tool_calls: [],
              },
              output_attachments: [],
              started_at: "2025-08-27T18:14:21.705289Z",
              completed_at: "2025-08-27T18:14:39.706752Z",
            },
          },
        },
      };

      const result = processChunk(chunk);
      expect(result.isToolCall).toBe(false);
      expect(result.text).toBe(
        "The ticket cost for the Red Hat Summit was $999.00 for a conference pass."
      );
    });
  });

  describe("Function Call Detection", () => {
    test("detects function calls in direct string chunks", () => {
      const chunk =
        '{"type": "function", "name": "knowledge_search", "parameters": {"query": "test"}}';
      const result = processChunk(chunk);
      expect(result.isToolCall).toBe(true);
      expect(result.text).toBe(null);
    });

    test("detects function calls in event payload content", () => {
      const chunk = {
        event: {
          payload: {
            content:
              '{"type": "function", "name": "knowledge_search", "parameters": {"query": "test"}}',
          },
        },
      };
      const result = processChunk(chunk);
      expect(result.isToolCall).toBe(true);
      expect(result.text).toBe(null);
    });

    test("detects tool_calls in delta structure", () => {
      const chunk = {
        delta: {
          tool_calls: [{ function: { name: "knowledge_search" } }],
        },
      };
      const result = processChunk(chunk);
      expect(result.isToolCall).toBe(true);
      expect(result.text).toBe(null);
    });

    test("detects function call in mixed content but skips it", () => {
      const chunk =
        '{"type": "function", "name": "knowledge_search", "parameters": {"query": "test"}} Based on the search results, here is your answer.';
      const result = processChunk(chunk);
      // This is detected as a tool call and skipped entirely - the implementation prioritizes safety
      expect(result.isToolCall).toBe(true);
      expect(result.text).toBe(null);
    });
  });

  describe("Text Extraction", () => {
    test("extracts text from direct string chunks", () => {
      const chunk = "Hello, this is a normal response.";
      const result = processChunk(chunk);
      expect(result.isToolCall).toBe(false);
      expect(result.text).toBe("Hello, this is a normal response.");
    });

    test("extracts text from delta structure", () => {
      const chunk = {
        delta: {
          text: "Hello, this is a normal response.",
        },
      };
      const result = processChunk(chunk);
      expect(result.isToolCall).toBe(false);
      expect(result.text).toBe("Hello, this is a normal response.");
    });

    test("extracts text from choices structure", () => {
      const chunk = {
        choices: [
          {
            delta: {
              content: "Hello, this is a normal response.",
            },
          },
        ],
      };
      const result = processChunk(chunk);
      expect(result.isToolCall).toBe(false);
      expect(result.text).toBe("Hello, this is a normal response.");
    });

    test("prioritizes output_message over model_response in turn structure", () => {
      const chunk = {
        event: {
          payload: {
            turn: {
              steps: [
                {
                  model_response: {
                    content: "Model response content.",
                  },
                },
              ],
              output_message: {
                content: "Final output message content.",
              },
            },
          },
        },
      };
      const result = processChunk(chunk);
      expect(result.isToolCall).toBe(false);
      expect(result.text).toBe("Final output message content.");
    });

    test("falls back to model_response when no output_message", () => {
      const chunk = {
        event: {
          payload: {
            turn: {
              steps: [
                {
                  model_response: {
                    content: "This is from the model response.",
                  },
                },
              ],
            },
          },
        },
      };
      const result = processChunk(chunk);
      expect(result.isToolCall).toBe(false);
      expect(result.text).toBe("This is from the model response.");
    });
  });

  describe("Edge Cases", () => {
    test("handles empty chunks", () => {
      const result = processChunk("");
      expect(result.isToolCall).toBe(false);
      expect(result.text).toBe("");
    });

    test("handles null chunks", () => {
      const result = processChunk(null);
      expect(result.isToolCall).toBe(false);
      expect(result.text).toBe(null);
    });

    test("handles undefined chunks", () => {
      const result = processChunk(undefined);
      expect(result.isToolCall).toBe(false);
      expect(result.text).toBe(null);
    });

    test("handles chunks with no text content", () => {
      const chunk = {
        event: {
          metadata: {
            timestamp: "2024-01-01",
          },
        },
      };
      const result = processChunk(chunk);
      expect(result.isToolCall).toBe(false);
      expect(result.text).toBe(null);
    });

    test("handles malformed JSON in function calls gracefully", () => {
      const chunk =
        '{"type": "function", "name": "knowledge_search"} incomplete json';
      const result = processChunk(chunk);
      expect(result.isToolCall).toBe(true);
      expect(result.text).toBe(null);
    });
  });
});
@@ -31,6 +31,9 @@ const mockClient = {
  toolgroups: {
    list: jest.fn(),
  },
  vectorDBs: {
    list: jest.fn(),
  },
};

jest.mock("@/hooks/use-auth-client", () => ({
@@ -164,7 +167,7 @@ describe("ChatPlaygroundPage", () => {
        session_name: "Test Session",
        started_at: new Date().toISOString(),
        turns: [],
      }); // No turns by default
      });
      mockClient.agents.retrieve.mockResolvedValue({
        agent_id: "test-agent",
        agent_config: {
@@ -417,7 +420,6 @@ describe("ChatPlaygroundPage", () => {
      });

      await waitFor(() => {
        // first agent should be auto-selected
        expect(mockClient.agents.session.create).toHaveBeenCalledWith(
          "agent_123",
          { session_name: "Default Session" }
@@ -464,7 +466,7 @@ describe("ChatPlaygroundPage", () => {
      });
    });

    test("hides delete button when only one agent exists", async () => {
    test("shows delete button even when only one agent exists", async () => {
      mockClient.agents.list.mockResolvedValue({
        data: [mockAgents[0]],
      });
@@ -474,9 +476,7 @@ describe("ChatPlaygroundPage", () => {
      });

      await waitFor(() => {
        expect(
          screen.queryByTitle("Delete current agent")
        ).not.toBeInTheDocument();
        expect(screen.getByTitle("Delete current agent")).toBeInTheDocument();
      });
    });

@@ -505,7 +505,7 @@ describe("ChatPlaygroundPage", () => {
      await waitFor(() => {
        expect(mockClient.agents.delete).toHaveBeenCalledWith("agent_123");
        expect(global.confirm).toHaveBeenCalledWith(
          "Are you sure you want to delete this agent? This action cannot be undone and will delete all associated sessions."
          "Are you sure you want to delete this agent? This action cannot be undone and will delete the agent and all its sessions."
        );
      });

@@ -584,4 +584,207 @@ describe("ChatPlaygroundPage", () => {
      consoleSpy.mockRestore();
    });
  });

  describe("RAG File Upload", () => {
    let mockFileReader: {
      readAsDataURL: jest.Mock;
      readAsText: jest.Mock;
      result: string | null;
      onload: (() => void) | null;
      onerror: (() => void) | null;
    };
    let mockRAGTool: {
      insert: jest.Mock;
    };

    beforeEach(() => {
      mockFileReader = {
        readAsDataURL: jest.fn(),
        readAsText: jest.fn(),
        result: null,
        onload: null,
        onerror: null,
      };
      global.FileReader = jest.fn(() => mockFileReader);

      mockRAGTool = {
        insert: jest.fn().mockResolvedValue({}),
      };
      mockClient.toolRuntime = {
        ragTool: mockRAGTool,
      };
    });

    afterEach(() => {
      jest.clearAllMocks();
    });

    test("handles text file upload", async () => {
      new File(["Hello, world!"], "test.txt", {
        type: "text/plain",
      });

      mockClient.agents.retrieve.mockResolvedValue({
        agent_id: "test-agent",
        agent_config: {
          toolgroups: [
            {
              name: "builtin::rag/knowledge_search",
              args: { vector_db_ids: ["test-vector-db"] },
            },
          ],
        },
      });

      await act(async () => {
        render(<ChatPlaygroundPage />);
      });

      await waitFor(() => {
        expect(screen.getByTestId("chat-component")).toBeInTheDocument();
      });

      const chatComponent = screen.getByTestId("chat-component");
      chatComponent.getAttribute("data-onragfileupload");

      // this is a simplified test
      expect(mockRAGTool.insert).not.toHaveBeenCalled();
    });

    test("handles PDF file upload with FileReader", async () => {
      new File([new ArrayBuffer(1000)], "test.pdf", {
        type: "application/pdf",
      });

      const mockDataURL = "data:application/pdf;base64,JVBERi0xLjQK";
      mockFileReader.result = mockDataURL;

      mockClient.agents.retrieve.mockResolvedValue({
        agent_id: "test-agent",
        agent_config: {
          toolgroups: [
            {
              name: "builtin::rag/knowledge_search",
              args: { vector_db_ids: ["test-vector-db"] },
            },
          ],
        },
      });

      await act(async () => {
        render(<ChatPlaygroundPage />);
      });

      await waitFor(() => {
        expect(screen.getByTestId("chat-component")).toBeInTheDocument();
      });

      expect(global.FileReader).toBeDefined();
    });

    test("handles different file types correctly", () => {
      const getContentType = (filename: string): string => {
        const ext = filename.toLowerCase().split(".").pop();
        switch (ext) {
          case "pdf":
            return "application/pdf";
          case "txt":
            return "text/plain";
          case "md":
            return "text/markdown";
          case "html":
            return "text/html";
          case "csv":
            return "text/csv";
          case "json":
            return "application/json";
          case "docx":
            return "application/vnd.openxmlformats-officedocument.wordprocessingml.document";
          case "doc":
            return "application/msword";
          default:
            return "application/octet-stream";
        }
      };

      expect(getContentType("test.pdf")).toBe("application/pdf");
      expect(getContentType("test.txt")).toBe("text/plain");
      expect(getContentType("test.md")).toBe("text/markdown");
      expect(getContentType("test.html")).toBe("text/html");
      expect(getContentType("test.csv")).toBe("text/csv");
      expect(getContentType("test.json")).toBe("application/json");
      expect(getContentType("test.docx")).toBe(
        "application/vnd.openxmlformats-officedocument.wordprocessingml.document"
      );
      expect(getContentType("test.doc")).toBe("application/msword");
      expect(getContentType("test.unknown")).toBe("application/octet-stream");
    });

    test("determines text vs binary file types correctly", () => {
      const isTextFile = (mimeType: string): boolean => {
        return (
          mimeType.startsWith("text/") ||
          mimeType === "application/json" ||
          mimeType === "text/markdown" ||
          mimeType === "text/html" ||
          mimeType === "text/csv"
        );
      };

      expect(isTextFile("text/plain")).toBe(true);
      expect(isTextFile("text/markdown")).toBe(true);
      expect(isTextFile("text/html")).toBe(true);
      expect(isTextFile("text/csv")).toBe(true);
      expect(isTextFile("application/json")).toBe(true);

      expect(isTextFile("application/pdf")).toBe(false);
      expect(isTextFile("application/msword")).toBe(false);
      expect(
        isTextFile(
          "application/vnd.openxmlformats-officedocument.wordprocessingml.document"
        )
      ).toBe(false);
      expect(isTextFile("application/octet-stream")).toBe(false);
    });

    test("handles FileReader error gracefully", async () => {
      const pdfFile = new File([new ArrayBuffer(1000)], "test.pdf", {
        type: "application/pdf",
      });

      mockFileReader.onerror = jest.fn();
      const mockError = new Error("FileReader failed");

      const fileReaderPromise = new Promise<string>((resolve, reject) => {
        const reader = new FileReader();
        reader.onload = () => resolve(reader.result as string);
        reader.onerror = () => reject(reader.error || mockError);
        reader.readAsDataURL(pdfFile);

        setTimeout(() => {
          reader.onerror?.(new ProgressEvent("error"));
        }, 0);
      });

      await expect(fileReaderPromise).rejects.toBeDefined();
    });

    test("handles large file upload with FileReader approach", () => {
      // create a large file
      const largeFile = new File(
        [new ArrayBuffer(10 * 1024 * 1024)],
        "large.pdf",
        {
          type: "application/pdf",
        }
      );

      expect(largeFile.size).toBe(10 * 1024 * 1024); // 10MB

      expect(global.FileReader).toBeDefined();

      const reader = new FileReader();
      expect(reader.readAsDataURL).toBeDefined();
    });
  });
});
File diff suppressed because it is too large
@@ -35,6 +35,7 @@ interface ChatPropsBase {
  ) => void;
  setMessages?: (messages: Message[]) => void;
  transcribeAudio?: (blob: Blob) => Promise<string>;
  onRAGFileUpload?: (file: File) => Promise<void>;
}

interface ChatPropsWithoutSuggestions extends ChatPropsBase {
@@ -62,6 +63,7 @@ export function Chat({
  onRateResponse,
  setMessages,
  transcribeAudio,
  onRAGFileUpload,
}: ChatProps) {
  const lastMessage = messages.at(-1);
  const isEmpty = messages.length === 0;
@@ -226,16 +228,17 @@ export function Chat({
        isPending={isGenerating || isTyping}
        handleSubmit={handleSubmit}
      >
        {({ files, setFiles }) => (
        {() => (
          <MessageInput
            value={input}
            onChange={handleInputChange}
            allowAttachments
            files={files}
            setFiles={setFiles}
            allowAttachments={true}
            files={null}
            setFiles={() => {}}
            stop={handleStop}
            isGenerating={isGenerating}
            transcribeAudio={transcribeAudio}
            onRAGFileUpload={onRAGFileUpload}
          />
        )}
      </ChatForm>
@@ -14,6 +14,7 @@ import { Card } from "@/components/ui/card";
import { Trash2 } from "lucide-react";
import type { Message } from "@/components/chat-playground/chat-message";
import { useAuthClient } from "@/hooks/use-auth-client";
import { cleanMessageContent } from "@/lib/message-content-utils";
import type {
  Session,
  SessionCreateParams,
@@ -219,10 +220,7 @@ export function Conversations({
          messages.push({
            id: `${turn.turn_id}-assistant-${messages.length}`,
            role: "assistant",
            content:
              typeof turn.output_message.content === "string"
                ? turn.output_message.content
                : JSON.stringify(turn.output_message.content),
            content: cleanMessageContent(turn.output_message.content),
            createdAt: new Date(
              turn.completed_at || turn.started_at || Date.now()
            ),
@@ -271,7 +269,7 @@ export function Conversations({
  );

  const deleteSession = async (sessionId: string) => {
    if (sessions.length <= 1 || !selectedAgentId) {
    if (!selectedAgentId) {
      return;
    }

@@ -324,7 +322,6 @@ export function Conversations({
    }
  }, [currentSession]);

  // Don't render if no agent is selected
  if (!selectedAgentId) {
    return null;
  }
@@ -357,7 +354,7 @@ export function Conversations({
          + New
        </Button>

        {currentSession && sessions.length > 1 && (
        {currentSession && (
          <Button
            onClick={() => deleteSession(currentSession.id)}
            variant="outline"
@@ -21,6 +21,7 @@ interface MessageInputBaseProps
  isGenerating: boolean;
  enableInterrupt?: boolean;
  transcribeAudio?: (blob: Blob) => Promise<string>;
  onRAGFileUpload?: (file: File) => Promise<void>;
}

interface MessageInputWithoutAttachmentProps extends MessageInputBaseProps {
@@ -213,8 +214,13 @@ export function MessageInput({
          className
        )}
        {...(props.allowAttachments
          ? omit(props, ["allowAttachments", "files", "setFiles"])
          : omit(props, ["allowAttachments"]))}
          ? omit(props, [
              "allowAttachments",
              "files",
              "setFiles",
              "onRAGFileUpload",
            ])
          : omit(props, ["allowAttachments", "onRAGFileUpload"]))}
      />

      {props.allowAttachments && (
@@ -254,11 +260,19 @@ export function MessageInput({
              size="icon"
              variant="outline"
              className="h-8 w-8"
              aria-label="Attach a file"
              disabled={true}
              aria-label="Upload file to RAG"
              disabled={false}
              onClick={async () => {
                const files = await showFileUploadDialog();
                addFiles(files);
                const input = document.createElement("input");
                input.type = "file";
                input.accept = ".pdf,.txt,.md,.html,.csv,.json";
                input.onchange = async e => {
                  const file = (e.target as HTMLInputElement).files?.[0];
                  if (file && props.onRAGFileUpload) {
                    await props.onRAGFileUpload(file);
                  }
                };
                input.click();
              }}
            >
              <Paperclip className="h-4 w-4" />
@@ -337,28 +351,6 @@ function FileUploadOverlay({ isDragging }: FileUploadOverlayProps) {
  );
}

function showFileUploadDialog() {
  const input = document.createElement("input");

  input.type = "file";
  input.multiple = true;
  input.accept = "*/*";
  input.click();

  return new Promise<File[] | null>(resolve => {
    input.onchange = e => {
      const files = (e.currentTarget as HTMLInputElement).files;

      if (files) {
        resolve(Array.from(files));
        return;
      }

      resolve(null);
    };
  });
}

function TranscribingOverlay() {
  return (
    <motion.div
243
llama_stack/ui/components/chat-playground/vector-db-creator.tsx
Normal file
@@ -0,0 +1,243 @@
"use client";

import { useState, useEffect } from "react";
import { Button } from "@/components/ui/button";
import { Input } from "@/components/ui/input";
import { Card } from "@/components/ui/card";
import {
  Select,
  SelectContent,
  SelectItem,
  SelectTrigger,
  SelectValue,
} from "@/components/ui/select";
import { useAuthClient } from "@/hooks/use-auth-client";
import type { Model } from "llama-stack-client/resources/models";

interface VectorDBCreatorProps {
  models: Model[];
  onVectorDBCreated?: (vectorDbId: string) => void;
  onCancel?: () => void;
}

interface VectorDBProvider {
  api: string;
  provider_id: string;
  provider_type: string;
}

export function VectorDBCreator({
  models,
  onVectorDBCreated,
  onCancel,
}: VectorDBCreatorProps) {
  const [vectorDbName, setVectorDbName] = useState("");
  const [selectedEmbeddingModel, setSelectedEmbeddingModel] = useState("");
  const [selectedProvider, setSelectedProvider] = useState("faiss");
  const [availableProviders, setAvailableProviders] = useState<
    VectorDBProvider[]
  >([]);
  const [isCreating, setIsCreating] = useState(false);
  const [isLoadingProviders, setIsLoadingProviders] = useState(false);
  const [error, setError] = useState<string | null>(null);
  const client = useAuthClient();

  const embeddingModels = models.filter(
    model => model.model_type === "embedding"
  );

  useEffect(() => {
    const fetchProviders = async () => {
      setIsLoadingProviders(true);
      try {
        const providersResponse = await client.providers.list();

        const vectorIoProviders = providersResponse.filter(
          (provider: VectorDBProvider) => provider.api === "vector_io"
        );

        setAvailableProviders(vectorIoProviders);

        if (vectorIoProviders.length > 0) {
          const faissProvider = vectorIoProviders.find(
            (p: VectorDBProvider) => p.provider_id === "faiss"
          );
          setSelectedProvider(
            faissProvider?.provider_id || vectorIoProviders[0].provider_id
          );
        }
      } catch (err) {
        console.error("Error fetching providers:", err);
        setAvailableProviders([
          {
            api: "vector_io",
            provider_id: "faiss",
            provider_type: "inline::faiss",
          },
        ]);
      } finally {
        setIsLoadingProviders(false);
      }
    };

    fetchProviders();
  }, [client]);

  const handleCreate = async () => {
    if (!vectorDbName.trim() || !selectedEmbeddingModel) {
      setError("Please provide a name and select an embedding model");
      return;
    }

    setIsCreating(true);
    setError(null);

    try {
      const embeddingModel = embeddingModels.find(
        m => m.identifier === selectedEmbeddingModel
      );

      if (!embeddingModel) {
        throw new Error("Selected embedding model not found");
      }

      const embeddingDimension = embeddingModel.metadata
        ?.embedding_dimension as number;

      if (!embeddingDimension) {
        throw new Error("Embedding dimension not available for selected model");
      }

      const vectorDbId = vectorDbName.trim() || `vector_db_${Date.now()}`;

      const response = await client.vectorDBs.register({
        vector_db_id: vectorDbId,
        embedding_model: selectedEmbeddingModel,
        embedding_dimension: embeddingDimension,
        provider_id: selectedProvider,
      });

      onVectorDBCreated?.(response.identifier || vectorDbId);
    } catch (err) {
      console.error("Error creating vector DB:", err);
      setError(
        err instanceof Error ? err.message : "Failed to create vector DB"
      );
    } finally {
      setIsCreating(false);
    }
  };

  return (
    <Card className="p-6 space-y-4">
      <h3 className="text-lg font-semibold">Create Vector Database</h3>

      <div className="space-y-4">
        <div>
          <label className="text-sm font-medium block mb-2">
            Vector DB Name
          </label>
          <Input
            value={vectorDbName}
            onChange={e => setVectorDbName(e.target.value)}
            placeholder="My Vector Database"
          />
        </div>

        <div>
          <label className="text-sm font-medium block mb-2">
            Embedding Model
          </label>
          <Select
            value={selectedEmbeddingModel}
            onValueChange={setSelectedEmbeddingModel}
          >
            <SelectTrigger>
              <SelectValue placeholder="Select Embedding Model" />
            </SelectTrigger>
            <SelectContent>
              {embeddingModels.map(model => (
                <SelectItem key={model.identifier} value={model.identifier}>
                  {model.identifier}
                </SelectItem>
              ))}
            </SelectContent>
          </Select>
          {selectedEmbeddingModel && (
            <p className="text-xs text-muted-foreground mt-1">
              Dimension:{" "}
              {embeddingModels.find(
                m => m.identifier === selectedEmbeddingModel
              )?.metadata?.embedding_dimension || "Unknown"}
            </p>
          )}
        </div>

        <div>
          <label className="text-sm font-medium block mb-2">
            Vector Database Provider
          </label>
          <Select
            value={selectedProvider}
            onValueChange={setSelectedProvider}
            disabled={isLoadingProviders}
          >
            <SelectTrigger>
              <SelectValue
                placeholder={
                  isLoadingProviders
                    ? "Loading providers..."
                    : "Select Provider"
                }
              />
            </SelectTrigger>
            <SelectContent>
              {availableProviders.map(provider => (
                <SelectItem
                  key={provider.provider_id}
                  value={provider.provider_id}
                >
                  {provider.provider_id}
                </SelectItem>
              ))}
            </SelectContent>
          </Select>
          {selectedProvider && (
            <p className="text-xs text-muted-foreground mt-1">
              Selected provider: {selectedProvider}
            </p>
          )}
        </div>

        {error && (
          <div className="text-destructive text-sm bg-destructive/10 p-2 rounded">
            {error}
          </div>
        )}

        <div className="flex gap-2 pt-2">
          <Button
            onClick={handleCreate}
            disabled={
              isCreating || !vectorDbName.trim() || !selectedEmbeddingModel
            }
            className="flex-1"
          >
            {isCreating ? "Creating..." : "Create Vector DB"}
          </Button>
          {onCancel && (
            <Button variant="outline" onClick={onCancel} className="flex-1">
              Cancel
            </Button>
          )}
        </div>
      </div>

      <div className="text-xs text-muted-foreground bg-muted/50 p-3 rounded">
        <strong>Note:</strong> This will create a new vector database that can
        be used with RAG tools. After creation, you'll be able to upload
        documents and use it for knowledge search in your agent conversations.
      </div>
    </Card>
  );
}
51
llama_stack/ui/lib/message-content-utils.ts
Normal file
@@ -0,0 +1,51 @@
// check if content contains function call JSON
export const containsToolCall = (content: string): boolean => {
  return (
    content.includes('"type": "function"') ||
    content.includes('"name": "knowledge_search"') ||
    content.includes('"parameters":') ||
    !!content.match(/\{"type":\s*"function".*?\}/)
  );
};

export const extractCleanText = (content: string): string | null => {
  if (containsToolCall(content)) {
    try {
      // parse and extract non-function call parts
      const jsonMatch = content.match(/\{"type":\s*"function"[^}]*\}[^}]*\}/);
      if (jsonMatch) {
        const jsonPart = jsonMatch[0];
        const parsedJson = JSON.parse(jsonPart);

        // if function call, extract text after JSON
        if (parsedJson.type === "function") {
          const textAfterJson = content
            .substring(content.indexOf(jsonPart) + jsonPart.length)
            .trim();
          return textAfterJson || null;
        }
      }
      return null;
    } catch {
      return null;
    }
  }
  return content;
};

// removes function call JSON handling different content types
export const cleanMessageContent = (
  content: string | unknown[] | unknown
): string => {
  if (typeof content === "string") {
    const cleaned = extractCleanText(content);
    return cleaned || "";
  } else if (Array.isArray(content)) {
    return content
      .filter((item: { type: string }) => item.type === "text")
      .map((item: { text: string }) => item.text)
      .join("");
  } else {
    return JSON.stringify(content);
  }
};