feat(ui): add infinite scroll pagination to chat completions/responses logs table (#2466)
Some checks failed: the Integration Auth Tests (oauth2_token) job, all Integration Tests matrix jobs (http and library clients, Python 3.10–3.12), Test External Providers (venv), and the Unit Tests jobs (Python 3.10–3.13) failed. Pre-commit succeeded in 1m23s.

## Summary:

This commit adds infinite scroll pagination to the chat completions and responses logs tables. Instead of receiving `data`, `isLoading`, and `error` as props, each table now fetches its own data through a new `usePagination` hook and passes `status`, `hasMore`, and an `onLoadMore` callback down to `LogsTable`, which loads the next page as the user approaches the bottom of the list.
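
For context, here is a minimal sketch of the pattern, not the exact implementation in this PR. The hook's return shape (`data`, `status`, `hasMore`, `error`, `loadMore`) and its `fetchFunction` and `errorMessagePrefix` options match the diff below; the `limit` option, the sentinel element, and the `IntersectionObserver` wiring are illustrative assumptions, since in this PR the scroll trigger itself lives inside `LogsTable`.

```tsx
// Illustrative sketch only, not the implementation in this PR.
// The usePagination return shape matches the diff; the sentinel /
// IntersectionObserver wiring and the `limit` option are assumptions.
import { useEffect, useRef } from "react";
import { usePagination } from "@/hooks/usePagination";
import { client } from "@/lib/client";

export function ExampleResponsesList() {
  const { data, status, hasMore, error, loadMore } = usePagination({
    limit: 20, // assumed page-size option
    fetchFunction: async (params: { after?: string; limit: number }) =>
      client.responses.list({ after: params.after, limit: params.limit }),
    errorMessagePrefix: "responses",
  });

  // Sentinel near the bottom of the list: when it scrolls into view and more
  // pages remain, request the next page.
  const sentinelRef = useRef<HTMLDivElement | null>(null);
  useEffect(() => {
    const el = sentinelRef.current;
    if (!el) return;
    const observer = new IntersectionObserver(([entry]) => {
      if (entry.isIntersecting && hasMore && status !== "loading") {
        loadMore();
      }
    });
    observer.observe(el);
    return () => observer.disconnect();
  }, [hasMore, status, loadMore]);

  if (status === "error") {
    return <p>{error?.message ?? "An unexpected error occurred."}</p>;
  }

  return (
    <div>
      <ul>
        {data.map((item) => (
          <li key={item.id}>{item.id}</li>
        ))}
      </ul>
      {hasMore && <div ref={sentinelRef} />}
    </div>
  );
}
```

Because the hook owns data fetching, the table components shed their `data`/`isLoading`/`error` props; most of the churn in the test file below comes from mocking `usePagination` instead of passing those props.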


## Test Plan:
1. Run unit tests: `npm run test`
2. Manual testing: navigate to the chat completions/responses pages
3. Verify that infinite scroll triggers when approaching the bottom of the table
4. Added Playwright tests: `npm run test:e2e` (a hypothetical spec is sketched after this list)
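
A hypothetical Playwright spec for step 4 might look like the sketch below; the route, selectors, and timing are assumptions and not the actual e2e tests added in this PR.

```ts
// Hypothetical sketch; the real e2e specs added by this PR may use different
// routes, selectors, and assertions.
import { test, expect } from "@playwright/test";

test("logs table loads more rows on scroll", async ({ page }) => {
  await page.goto("/logs/responses"); // assumed route for the responses logs page

  const rows = page.locator("table tbody tr");
  const initialCount = await rows.count();

  // Scroll far enough to bring the infinite-scroll trigger into view.
  await page.mouse.wheel(0, 10_000);

  // Wait for the next page of results to be appended to the table.
  await expect
    .poll(() => rows.count(), { timeout: 10_000 })
    .toBeGreaterThan(initialCount);
});
```
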
ehhuang 2025-06-18 15:28:39 -07:00 committed by GitHub
parent 90d03552d4
commit e6bfc717cb
20 changed files with 1145 additions and 388 deletions

Changed file: ResponsesTable unit tests

@@ -15,26 +15,60 @@ jest.mock("next/navigation", () => ({
// Mock helper functions
jest.mock("@/lib/truncate-text");
// Mock the client
jest.mock("@/lib/client", () => ({
client: {
responses: {
list: jest.fn(),
},
},
}));
// Mock the usePagination hook
const mockLoadMore = jest.fn();
jest.mock("@/hooks/usePagination", () => ({
usePagination: jest.fn(() => ({
data: [],
status: "idle",
hasMore: false,
error: null,
loadMore: mockLoadMore,
})),
}));
// Import the mocked functions
import { truncateText as originalTruncateText } from "@/lib/truncate-text";
// Import the mocked hook
import { usePagination } from "@/hooks/usePagination";
const mockedUsePagination = usePagination as jest.MockedFunction<
typeof usePagination
>;
// Cast to jest.Mock for typings
const truncateText = originalTruncateText as jest.Mock;
describe("ResponsesTable", () => {
const defaultProps = {
data: [] as OpenAIResponse[],
isLoading: false,
error: null,
};
const defaultProps = {};
beforeEach(() => {
// Reset all mocks before each test
mockPush.mockClear();
truncateText.mockClear();
mockLoadMore.mockClear();
jest.clearAllMocks();
// Default pass-through implementation
truncateText.mockImplementation((text: string | undefined) => text);
// Default hook return value
mockedUsePagination.mockReturnValue({
data: [],
status: "idle",
hasMore: false,
error: null,
loadMore: mockLoadMore,
});
});
test("renders without crashing with default props", () => {
@@ -65,7 +99,16 @@ describe("ResponsesTable", () => {
],
};
render(<ResponsesTable {...defaultProps} data={[mockResponse]} />);
// Configure the mock to return our test data
mockedUsePagination.mockReturnValue({
data: [mockResponse],
status: "idle",
hasMore: false,
error: null,
loadMore: mockLoadMore,
});
render(<ResponsesTable {...defaultProps} />);
const row = screen.getByText("Test input").closest("tr");
if (row) {
@@ -77,10 +120,16 @@ describe("ResponsesTable", () => {
});
describe("Loading State", () => {
test("renders skeleton UI when isLoading is true", () => {
const { container } = render(
<ResponsesTable {...defaultProps} isLoading={true} />,
);
test("renders skeleton UI when status is loading", () => {
mockedUsePagination.mockReturnValue({
data: [],
status: "loading",
hasMore: false,
error: null,
loadMore: mockLoadMore,
});
const { container } = render(<ResponsesTable {...defaultProps} />);
// Check for skeleton in the table caption
const tableCaption = container.querySelector("caption");
@@ -105,42 +154,50 @@ describe("ResponsesTable", () => {
});
describe("Error State", () => {
test("renders error message when error prop is provided", () => {
test("renders error message when error is provided", () => {
const errorMessage = "Network Error";
render(
<ResponsesTable
{...defaultProps}
error={{ name: "Error", message: errorMessage }}
/>,
);
mockedUsePagination.mockReturnValue({
data: [],
status: "error",
hasMore: false,
error: { name: "Error", message: errorMessage } as Error,
loadMore: mockLoadMore,
});
render(<ResponsesTable {...defaultProps} />);
expect(
screen.getByText(`Error fetching data: ${errorMessage}`),
screen.getByText("Unable to load chat completions"),
).toBeInTheDocument();
expect(screen.getByText(errorMessage)).toBeInTheDocument();
});
test("renders default error message when error.message is not available", () => {
render(
<ResponsesTable
{...defaultProps}
error={{ name: "Error", message: "" }}
/>,
);
expect(
screen.getByText("Error fetching data: An unknown error occurred"),
).toBeInTheDocument();
});
test.each([{ name: "Error", message: "" }, {}])(
"renders default error message when error has no message",
(errorObject) => {
mockedUsePagination.mockReturnValue({
data: [],
status: "error",
hasMore: false,
error: errorObject as Error,
loadMore: mockLoadMore,
});
test("renders default error message when error prop is an object without message", () => {
render(<ResponsesTable {...defaultProps} error={{} as Error} />);
expect(
screen.getByText("Error fetching data: An unknown error occurred"),
).toBeInTheDocument();
});
render(<ResponsesTable {...defaultProps} />);
expect(
screen.getByText("Unable to load chat completions"),
).toBeInTheDocument();
expect(
screen.getByText(
"An unexpected error occurred while loading the data.",
),
).toBeInTheDocument();
},
);
});
describe("Empty State", () => {
test('renders "No responses found." and no table when data array is empty', () => {
render(<ResponsesTable data={[]} isLoading={false} error={null} />);
render(<ResponsesTable {...defaultProps} />);
expect(screen.getByText("No responses found.")).toBeInTheDocument();
// Ensure that the table structure is NOT rendered in the empty state
@@ -151,7 +208,7 @@ describe("ResponsesTable", () => {
describe("Data Rendering", () => {
test("renders table caption, headers, and response data correctly", () => {
const mockResponses = [
const mockResponses: OpenAIResponse[] = [
{
id: "resp_1",
object: "response" as const,
@@ -196,9 +253,15 @@ describe("ResponsesTable", () => {
},
];
render(
<ResponsesTable data={mockResponses} isLoading={false} error={null} />,
);
mockedUsePagination.mockReturnValue({
data: mockResponses,
status: "idle",
hasMore: false,
error: null,
loadMore: mockLoadMore,
});
render(<ResponsesTable {...defaultProps} />);
// Table caption
expect(
@@ -246,9 +309,15 @@ describe("ResponsesTable", () => {
],
};
render(
<ResponsesTable data={[mockResponse]} isLoading={false} error={null} />,
);
mockedUsePagination.mockReturnValue({
data: [mockResponse],
status: "idle",
hasMore: false,
error: null,
loadMore: mockLoadMore,
});
render(<ResponsesTable {...defaultProps} />);
expect(screen.getByText("Simple string input")).toBeInTheDocument();
});
@@ -272,9 +341,15 @@ describe("ResponsesTable", () => {
],
};
render(
<ResponsesTable data={[mockResponse]} isLoading={false} error={null} />,
);
mockedUsePagination.mockReturnValue({
data: [mockResponse],
status: "idle",
hasMore: false,
error: null,
loadMore: mockLoadMore,
});
render(<ResponsesTable {...defaultProps} />);
expect(screen.getByText("Array input text")).toBeInTheDocument();
});
@@ -294,9 +369,15 @@ describe("ResponsesTable", () => {
],
};
const { container } = render(
<ResponsesTable data={[mockResponse]} isLoading={false} error={null} />,
);
mockedUsePagination.mockReturnValue({
data: [mockResponse],
status: "idle",
hasMore: false,
error: null,
loadMore: mockLoadMore,
});
const { container } = render(<ResponsesTable {...defaultProps} />);
// Find the input cell (first cell in the data row) and verify it's empty
const inputCell = container.querySelector("tbody tr td:first-child");
@@ -323,9 +404,15 @@ describe("ResponsesTable", () => {
input: [{ type: "message", content: "input" }],
};
render(
<ResponsesTable data={[mockResponse]} isLoading={false} error={null} />,
);
mockedUsePagination.mockReturnValue({
data: [mockResponse],
status: "idle",
hasMore: false,
error: null,
loadMore: mockLoadMore,
});
render(<ResponsesTable {...defaultProps} />);
expect(screen.getByText("Simple string output")).toBeInTheDocument();
});
@@ -349,9 +436,15 @@ describe("ResponsesTable", () => {
input: [{ type: "message", content: "input" }],
};
render(
<ResponsesTable data={[mockResponse]} isLoading={false} error={null} />,
);
mockedUsePagination.mockReturnValue({
data: [mockResponse],
status: "idle",
hasMore: false,
error: null,
loadMore: mockLoadMore,
});
render(<ResponsesTable {...defaultProps} />);
expect(screen.getByText("Array output text")).toBeInTheDocument();
});
@@ -374,9 +467,15 @@ describe("ResponsesTable", () => {
input: [{ type: "message", content: "input" }],
};
render(
<ResponsesTable data={[mockResponse]} isLoading={false} error={null} />,
);
mockedUsePagination.mockReturnValue({
data: [mockResponse],
status: "idle",
hasMore: false,
error: null,
loadMore: mockLoadMore,
});
render(<ResponsesTable {...defaultProps} />);
expect(
screen.getByText('search_function({"query": "test"})'),
).toBeInTheDocument();
@@ -400,9 +499,15 @@ describe("ResponsesTable", () => {
input: [{ type: "message", content: "input" }],
};
render(
<ResponsesTable data={[mockResponse]} isLoading={false} error={null} />,
);
mockedUsePagination.mockReturnValue({
data: [mockResponse],
status: "idle",
hasMore: false,
error: null,
loadMore: mockLoadMore,
});
render(<ResponsesTable {...defaultProps} />);
expect(screen.getByText("simple_function({})")).toBeInTheDocument();
});
@@ -423,9 +528,15 @@ describe("ResponsesTable", () => {
input: [{ type: "message", content: "input" }],
};
render(
<ResponsesTable data={[mockResponse]} isLoading={false} error={null} />,
);
mockedUsePagination.mockReturnValue({
data: [mockResponse],
status: "idle",
hasMore: false,
error: null,
loadMore: mockLoadMore,
});
render(<ResponsesTable {...defaultProps} />);
expect(
screen.getByText("web_search_call(status: completed)"),
).toBeInTheDocument();
@@ -449,9 +560,15 @@ describe("ResponsesTable", () => {
input: [{ type: "message", content: "input" }],
};
render(
<ResponsesTable data={[mockResponse]} isLoading={false} error={null} />,
);
mockedUsePagination.mockReturnValue({
data: [mockResponse],
status: "idle",
hasMore: false,
error: null,
loadMore: mockLoadMore,
});
render(<ResponsesTable {...defaultProps} />);
// Should contain the JSON stringified version
expect(screen.getByText(/unknown_call/)).toBeInTheDocument();
});
@@ -472,9 +589,15 @@ describe("ResponsesTable", () => {
input: [{ type: "message", content: "input" }],
};
render(
<ResponsesTable data={[mockResponse]} isLoading={false} error={null} />,
);
mockedUsePagination.mockReturnValue({
data: [mockResponse],
status: "idle",
hasMore: false,
error: null,
loadMore: mockLoadMore,
});
render(<ResponsesTable {...defaultProps} />);
// Should contain the JSON stringified version of the output array
expect(screen.getByText(/unknown_type/)).toBeInTheDocument();
});
@@ -520,18 +643,21 @@ describe("ResponsesTable", () => {
],
};
render(
<ResponsesTable data={[mockResponse]} isLoading={false} error={null} />,
);
mockedUsePagination.mockReturnValue({
data: [mockResponse],
status: "idle",
hasMore: false,
error: null,
loadMore: mockLoadMore,
});
render(<ResponsesTable {...defaultProps} />);
// The truncated text should be present for both input and output
const truncatedTexts = screen.getAllByText(
longInput.slice(0, 10) + "...",
);
expect(truncatedTexts.length).toBe(2); // one for input, one for output
truncatedTexts.forEach((textElement) =>
expect(textElement).toBeInTheDocument(),
);
});
});
});

Changed file: ResponsesTable component

@@ -2,10 +2,13 @@
import {
OpenAIResponse,
ResponseInput,
ResponseInputMessageContent,
UsePaginationOptions,
} from "@/lib/types";
import { LogsTable, LogTableRow } from "@/components/logs/logs-table";
import { usePagination } from "@/hooks/usePagination";
import { client } from "@/lib/client";
import type { ResponseListResponse } from "llama-stack-client/resources/responses/responses";
import {
isMessageInput,
isMessageItem,
@@ -17,11 +20,34 @@ import {
} from "./utils/item-types";
interface ResponsesTableProps {
data: OpenAIResponse[];
isLoading: boolean;
error: Error | null;
/** Optional pagination configuration */
paginationOptions?: UsePaginationOptions;
}
/**
* Helper function to convert ResponseListResponse.Data to OpenAIResponse
*/
const convertResponseListData = (
responseData: ResponseListResponse.Data,
): OpenAIResponse => {
return {
id: responseData.id,
created_at: responseData.created_at,
model: responseData.model,
object: responseData.object,
status: responseData.status,
output: responseData.output as OpenAIResponse["output"],
input: responseData.input as OpenAIResponse["input"],
error: responseData.error,
parallel_tool_calls: responseData.parallel_tool_calls,
previous_response_id: responseData.previous_response_id,
temperature: responseData.temperature,
top_p: responseData.top_p,
truncation: responseData.truncation,
user: responseData.user,
};
};
function getInputText(response: OpenAIResponse): string {
const firstInput = response.input.find(isMessageInput);
if (firstInput) {
@@ -98,18 +124,43 @@ function formatResponseToRow(response: OpenAIResponse): LogTableRow {
};
}
export function ResponsesTable({
data,
isLoading,
error,
}: ResponsesTableProps) {
export function ResponsesTable({ paginationOptions }: ResponsesTableProps) {
const fetchFunction = async (params: {
after?: string;
limit: number;
model?: string;
order?: string;
}) => {
const response = await client.responses.list({
after: params.after,
limit: params.limit,
...(params.model && { model: params.model }),
...(params.order && { order: params.order }),
} as any);
const listResponse = response as ResponseListResponse;
return {
...listResponse,
data: listResponse.data.map(convertResponseListData),
};
};
const { data, status, hasMore, error, loadMore } = usePagination({
...paginationOptions,
fetchFunction,
errorMessagePrefix: "responses",
});
const formattedData = data.map(formatResponseToRow);
return (
<LogsTable
data={formattedData}
isLoading={isLoading}
status={status}
hasMore={hasMore}
error={error}
onLoadMore={loadMore}
caption="A list of your recent responses."
emptyMessage="No responses found."
/>