mirror of
https://github.com/meta-llama/llama-stack.git
synced 2025-08-12 04:50:39 +00:00
Some checks failed
Integration Auth Tests / test-matrix (oauth2_token) (push) Failing after 4s
Integration Tests (Replay) / discover-tests (push) Successful in 3s
Test External Providers Installed via Module / test-external-providers-from-module (venv) (push) Has been skipped
Python Package Build Test / build (3.12) (push) Failing after 9s
Vector IO Integration Tests / test-matrix (3.12, remote::qdrant) (push) Failing after 15s
Vector IO Integration Tests / test-matrix (3.12, inline::sqlite-vec) (push) Failing after 16s
Unit Tests / unit-tests (3.13) (push) Failing after 12s
Test External API and Providers / test-external (venv) (push) Failing after 13s
SqlStore Integration Tests / test-postgres (3.12) (push) Failing after 20s
SqlStore Integration Tests / test-postgres (3.13) (push) Failing after 20s
Integration Tests (Replay) / Integration Tests (, , , client=, vision=) (push) Failing after 14s
Vector IO Integration Tests / test-matrix (3.12, inline::milvus) (push) Failing after 20s
Python Package Build Test / build (3.13) (push) Failing after 15s
Vector IO Integration Tests / test-matrix (3.12, remote::pgvector) (push) Failing after 18s
Vector IO Integration Tests / test-matrix (3.12, remote::weaviate) (push) Failing after 23s
Vector IO Integration Tests / test-matrix (3.13, remote::pgvector) (push) Failing after 23s
Vector IO Integration Tests / test-matrix (3.13, remote::weaviate) (push) Failing after 14s
Vector IO Integration Tests / test-matrix (3.12, inline::faiss) (push) Failing after 17s
Vector IO Integration Tests / test-matrix (3.13, remote::chromadb) (push) Failing after 15s
Vector IO Integration Tests / test-matrix (3.12, remote::chromadb) (push) Failing after 17s
Vector IO Integration Tests / test-matrix (3.13, inline::milvus) (push) Failing after 15s
Vector IO Integration Tests / test-matrix (3.13, inline::faiss) (push) Failing after 16s
Vector IO Integration Tests / test-matrix (3.13, remote::qdrant) (push) Failing after 17s
Vector IO Integration Tests / test-matrix (3.13, inline::sqlite-vec) (push) Failing after 57s
Unit Tests / unit-tests (3.12) (push) Failing after 55s
Pre-commit / pre-commit (push) Successful in 2m10s
# What does this PR do? This PR updates the UI to create new: 1. `/files/{file_id}` 2. `files/{file_id}/contents` 3. `files/{file_id}/contents/{content_id}` The list of files is clickable which brings the user to the Files Detail page The File Details page shows all of the content The content details page shows the individual chunk/content parsed These only use our existing OpenAI compatible APIs. I have a separate branch where I expose the embedding and the portal is correctly populated. I included the FE rendering code for that in this PR. 1. `vector-stores/{vector_store_id}/files/{file_id}` <img width="1913" height="1351" alt="Screenshot 2025-08-06 at 10 20 12 PM" src="https://github.com/user-attachments/assets/08010d5e-60c8-4bd9-9f3e-a2731ed1ad55" /> 2. `vector-stores/{vector_store_id}/files/{file_id}/contents` <img width="1920" height="1272" alt="Screenshot 2025-08-06 at 10 21 23 PM" src="https://github.com/user-attachments/assets/3b91e67b-5d64-4fe6-91b6-18f14587e850" /> 3. `vector-stores/{vector_store_id}/files/{file_id}/contents/{content_id}` <img width="1916" height="1273" alt="Screenshot 2025-08-06 at 10 21 45 PM" src="https://github.com/user-attachments/assets/d38ca996-e8d9-460c-9e39-7ff0cb5ec0dd" /> ## Test Plan I tested this locally and reviewed the code. I generated a significant share of the code with Claude and some manual intervention. After this, I'll begin adding tests to the UI. --------- Signed-off-by: Francisco Javier Arceo <farceo@redhat.com>
112 lines
3.6 KiB
TypeScript
112 lines
3.6 KiB
TypeScript
import type { FileContentResponse } from "llama-stack-client/resources/vector-stores/files";
|
|
import type { LlamaStackClient } from "llama-stack-client";
|
|
|
|
/** A single content entry (e.g. a text chunk) as returned by the vector-store file contents API. */
export type VectorStoreContent = FileContentResponse.Content;

/** Raw response from `vectorStores.files.content` for one file. */
export type VectorStoreContentsResponse = FileContentResponse;
|
|
|
|
/**
 * A single content chunk of a vector-store file, flattened for UI consumption.
 * Built by `ContentsAPI.listContents` from the raw API response.
 */
export interface VectorStoreContentItem {
  // Server-assigned chunk id, or a synthetic `content_<fileId>_<index>` fallback.
  id: string;
  // Object type tag, e.g. 'vector_store.file.content'.
  object: string;
  // Creation time in Unix seconds (falls back to fetch time when the API omits it).
  created_timestamp: number;
  // Owning vector store.
  vector_store_id: string;
  // Owning file within the vector store.
  file_id: string;
  // The underlying content payload from the API.
  content: VectorStoreContent;
  // chunk_metadata fields from the API plus derived fields (e.g. content_length).
  metadata: Record<string, any>;
  // Embedding vector, present only when the server exposes it.
  embedding?: number[];
}
|
|
|
|
/**
 * Response shape for deleting a single content chunk.
 * NOTE: per-chunk deletion is not yet implemented by the API
 * (`ContentsAPI.deleteContent` currently throws).
 */
export interface VectorStoreContentDeleteResponse {
  // Id of the chunk that was (or would be) deleted.
  id: string;
  // Object type tag.
  object: string;
  // Whether the deletion took effect.
  deleted: boolean;
}
|
|
|
|
/**
 * OpenAI-style list envelope returned by `ContentsAPI.listContents`.
 */
export interface VectorStoreListContentsResponse {
  // Always 'list'.
  object: string;
  // Content items for the requested file.
  data: VectorStoreContentItem[];
  // Cursor helpers for pagination (optional; not populated by the current client-side impl).
  first_id?: string;
  last_id?: string;
  // True when items beyond `data` exist (i.e. a limit truncated the result).
  has_more: boolean;
}
|
|
|
|
export class ContentsAPI {
|
|
constructor(private client: LlamaStackClient) {}
|
|
|
|
async getFileContents(vectorStoreId: string, fileId: string): Promise<VectorStoreContentsResponse> {
|
|
return this.client.vectorStores.files.content(vectorStoreId, fileId);
|
|
}
|
|
|
|
async getContent(vectorStoreId: string, fileId: string, contentId: string): Promise<VectorStoreContentItem> {
|
|
const contentsResponse = await this.listContents(vectorStoreId, fileId);
|
|
const targetContent = contentsResponse.data.find(c => c.id === contentId);
|
|
|
|
if (!targetContent) {
|
|
throw new Error(`Content ${contentId} not found`);
|
|
}
|
|
|
|
return targetContent;
|
|
}
|
|
|
|
async updateContent(
|
|
vectorStoreId: string,
|
|
fileId: string,
|
|
contentId: string,
|
|
updates: { content?: string; metadata?: Record<string, any> }
|
|
): Promise<VectorStoreContentItem> {
|
|
throw new Error("Individual content updates not yet implemented in API");
|
|
}
|
|
|
|
async deleteContent(vectorStoreId: string, fileId: string, contentId: string): Promise<VectorStoreContentDeleteResponse> {
|
|
throw new Error("Individual content deletion not yet implemented in API");
|
|
}
|
|
|
|
async listContents(
|
|
vectorStoreId: string,
|
|
fileId: string,
|
|
options?: {
|
|
limit?: number;
|
|
order?: string;
|
|
after?: string;
|
|
before?: string;
|
|
}
|
|
): Promise<VectorStoreListContentsResponse> {
|
|
const fileContents = await this.client.vectorStores.files.content(vectorStoreId, fileId);
|
|
const contentItems: VectorStoreContentItem[] = [];
|
|
|
|
fileContents.content.forEach((content, contentIndex) => {
|
|
const rawContent = content as any;
|
|
|
|
// Extract actual fields from the API response
|
|
const embedding = rawContent.embedding || undefined;
|
|
const created_timestamp = rawContent.created_timestamp || rawContent.created_at || Date.now() / 1000;
|
|
const chunkMetadata = rawContent.chunk_metadata || {};
|
|
const contentId = rawContent.chunk_metadata?.chunk_id || rawContent.id || `content_${fileId}_${contentIndex}`;
|
|
const objectType = rawContent.object || 'vector_store.file.content';
|
|
contentItems.push({
|
|
id: contentId,
|
|
object: objectType,
|
|
created_timestamp: created_timestamp,
|
|
vector_store_id: vectorStoreId,
|
|
file_id: fileId,
|
|
content: content,
|
|
embedding: embedding,
|
|
metadata: {
|
|
...chunkMetadata, // chunk_metadata fields from API
|
|
content_length: content.type === 'text' ? content.text.length : 0,
|
|
},
|
|
});
|
|
});
|
|
|
|
// apply pagination if needed
|
|
let filteredItems = contentItems;
|
|
if (options?.limit) {
|
|
filteredItems = filteredItems.slice(0, options.limit);
|
|
}
|
|
|
|
return {
|
|
object: 'list',
|
|
data: filteredItems,
|
|
has_more: contentItems.length > (options?.limit || contentItems.length),
|
|
};
|
|
}
|
|
}
|