feat(ui): add infinite scroll pagination to chat completions/responses logs table

## Summary:

  This commit adds infinite scroll pagination to the
  chat completions and responses tables.


## Test Plan:
  1. Run unit tests: npm run test
  2. Manual testing: Navigate to chat
  completions/responses pages
  3. Verify infinite scroll triggers when approaching
  bottom
  4. Run Playwright tests: npm run test:e2e
This commit is contained in:
Eric Huang 2025-06-17 16:26:06 -07:00
parent 15f630e5da
commit 66e217fea7
20 changed files with 1145 additions and 388 deletions

View file

@@ -1,66 +1,7 @@
"use client";
import { useEffect, useState } from "react";
import type { ResponseListResponse } from "llama-stack-client/resources/responses/responses";
import { OpenAIResponse } from "@/lib/types";
import { ResponsesTable } from "@/components/responses/responses-table";
import { client } from "@/lib/client";
/**
 * Page that lists OpenAI responses in a table.
 *
 * On mount, fetches the response list once via `client.responses.list()`,
 * converts each entry from the client library's `ResponseListResponse.Data`
 * shape into the app-level `OpenAIResponse` type, and renders the result in
 * a `ResponsesTable` with loading and error states.
 */
export default function ResponsesPage() {
  const [responses, setResponses] = useState<OpenAIResponse[]>([]);
  const [isLoading, setIsLoading] = useState<boolean>(true);
  const [error, setError] = useState<Error | null>(null);

  useEffect(() => {
    // Map one ResponseListResponse.Data entry to OpenAIResponse.
    // Defined inside the effect so the empty dependency array below does not
    // violate react-hooks/exhaustive-deps.
    const convertResponseListData = (
      responseData: ResponseListResponse.Data,
    ): OpenAIResponse => ({
      id: responseData.id,
      created_at: responseData.created_at,
      model: responseData.model,
      object: responseData.object,
      status: responseData.status,
      // Casts narrow the library's looser field types to the app-level types.
      output: responseData.output as OpenAIResponse["output"],
      input: responseData.input as OpenAIResponse["input"],
      error: responseData.error,
      parallel_tool_calls: responseData.parallel_tool_calls,
      previous_response_id: responseData.previous_response_id,
      temperature: responseData.temperature,
      top_p: responseData.top_p,
      truncation: responseData.truncation,
      user: responseData.user,
    });

    const fetchResponses = async (): Promise<void> => {
      setIsLoading(true);
      setError(null);
      try {
        const response = await client.responses.list();
        const responseListData = response as ResponseListResponse;
        setResponses(responseListData.data.map(convertResponseListData));
      } catch (err) {
        console.error("Error fetching responses:", err);
        // Preserve a real Error for the table; wrap anything else.
        setError(
          err instanceof Error ? err : new Error("Failed to fetch responses"),
        );
        setResponses([]);
      } finally {
        // Always clear the loading flag, on success or failure.
        setIsLoading(false);
      }
    };

    // Fire-and-forget on mount; errors are handled inside fetchResponses.
    void fetchResponses();
  }, []);

  // NOTE(review): the original contained a second, unreachable
  // `return <ResponsesTable paginationOptions={{ limit: 20 }} />;` after this
  // return statement; it was dead code and has been removed. If the intent was
  // to switch to the self-paginating table, the state/fetch logic above should
  // be deleted instead — confirm against the ResponsesTable API.
  return (
    <ResponsesTable data={responses} isLoading={isLoading} error={error} />
  );
}