Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-07-24 05:14:30 +00:00)
feat(ui): add views for Responses (#2293)
# What does this PR do?

* Add responses list and detail views
* Refactored components to be shared as much as possible between chat completions and responses

## Test Plan

<img width="2014" alt="image" src="https://github.com/user-attachments/assets/6dee12ea-8876-4351-a6eb-2338058466ef" />
<img width="2021" alt="image" src="https://github.com/user-attachments/assets/6c7c71b8-25b7-4199-9c57-6960be5580c8" />

Added tests.
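To illustrate the kind of sharing the PR describes, here is a minimal sketch (not the PR's actual code) of a generic detail-view wrapper that both the chat-completion and response detail pages could render. The `DetailCard` name and its props are hypothetical; only `ChatCompletionDetailView` appears in the diff below.

```tsx
// Hypothetical shared wrapper; the PR's real shared components may be named and shaped differently.
import React from "react";

interface DetailCardProps {
  title: string;               // e.g. "Chat Completion" or "Response"
  id: string;                  // identifier shown in the header
  isLoading: boolean;          // render a loading state while fetching
  error: Error | null;         // render an error state on failure
  children?: React.ReactNode;  // type-specific body (messages, output items, ...)
}

// Both detail pages can delegate loading/error/layout handling here
// and only supply the type-specific body as children.
export function DetailCard({ title, id, isLoading, error, children }: DetailCardProps) {
  if (isLoading) return <p>Loading {title.toLowerCase()}…</p>;
  if (error) return <p>Error loading {title.toLowerCase()} {id}: {error.message}</p>;
  return (
    <section>
      <h2>
        {title} {id}
      </h2>
      {children}
    </section>
  );
}
```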
This commit is contained in: parent 6352078e4b, commit 56e5ddb39f
34 changed files with 3282 additions and 380 deletions
Excerpt from the diff of the chat completion detail page (`ChatCompletionDetailPage`): the page now imports the shared `client` from `@/lib/client` instead of constructing its own `LlamaStackClient`.

```diff
@@ -2,9 +2,9 @@
 import { useEffect, useState } from "react";
 import { useParams } from "next/navigation";
-import LlamaStackClient from "llama-stack-client";
 import { ChatCompletion } from "@/lib/types";
 import { ChatCompletionDetailView } from "@/components/chat-completions/chat-completion-detail";
+import { client } from "@/lib/client";

 export default function ChatCompletionDetailPage() {
   const params = useParams();
@@ -22,10 +22,6 @@ export default function ChatCompletionDetailPage() {
       return;
     }

-    const client = new LlamaStackClient({
-      baseURL: process.env.NEXT_PUBLIC_LLAMA_STACK_BASE_URL,
-    });
-
     const fetchCompletionDetail = async () => {
       setIsLoading(true);
       setError(null);
```
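The excerpt swaps a per-page `new LlamaStackClient(...)` for a shared `client` import. The contents of `@/lib/client` are not shown in this excerpt; a minimal sketch of what such a module could look like, assuming it reuses the same `NEXT_PUBLIC_LLAMA_STACK_BASE_URL` environment variable the old per-page code read:

```ts
// lib/client.ts (sketch, not necessarily the PR's actual file)
// Export a single shared LlamaStackClient so individual pages
// no longer construct their own client instances.
import LlamaStackClient from "llama-stack-client";

export const client = new LlamaStackClient({
  // Assumed: same base-URL env var used by the removed per-page code.
  baseURL: process.env.NEXT_PUBLIC_LLAMA_STACK_BASE_URL,
});
```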