Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-08-15 14:08:00 +00:00)

Commit d460fd64b4: Merge branch 'main' into chroma

17 changed files with 1419 additions and 88 deletions
@@ -157,7 +157,7 @@ docker run \
If you've set up your local development environment, you can also build the image using your local virtual environment.

```bash
-INFERENCE_MODEL=meta-llama/Llama-3.1-8b-Instruct
+INFERENCE_MODEL=meta-llama/Llama-3.1-8B-Instruct
llama stack build --distro nvidia --image-type venv
llama stack run ./run.yaml \
  --port 8321 \
@@ -12,6 +12,18 @@ That means you'll get fast and efficient vector retrieval.

- Lightweight and easy to use
- Fully integrated with Llama Stack
- GPU support
- **Vector search** - FAISS supports pure vector similarity search using embeddings

## Search Modes

**Supported:**
- **Vector Search** (`mode="vector"`): Performs vector similarity search using embeddings

**Not Supported:**
- **Keyword Search** (`mode="keyword"`): Not supported by FAISS
- **Hybrid Search** (`mode="hybrid"`): Not supported by FAISS

> **Note**: FAISS is designed as a pure vector similarity search library. See the [FAISS GitHub repository](https://github.com/facebookresearch/faiss) for more details about FAISS's core functionality.
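
Since only vector mode is available, a query against a FAISS-backed store looks like the vector-search examples elsewhere in this guide. A minimal sketch (the store ID and query text are placeholders):

```python
# Vector similarity search against a FAISS-backed vector store (illustrative).
# FAISS only supports search_mode="vector"; "keyword" and "hybrid" are rejected by the provider.
search_response = client.vector_stores.search(
    vector_store_id=vector_store.id,
    query="What is machine learning?",
    search_mode="vector",
    max_num_results=5,
)
```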

## Usage
@@ -11,6 +11,7 @@ That means you're not limited to storing vectors in memory or in a separate serv

- Easy to use
- Fully integrated with Llama Stack
- Supports all search modes: vector, keyword, and hybrid search (both inline and remote configurations)

## Usage

@@ -101,6 +102,92 @@ vector_io:
- **`client_pem_path`**: Path to the **client certificate** file (required for mTLS).
- **`client_key_path`**: Path to the **client private key** file (required for mTLS).

## Search Modes

Milvus supports three different search modes for both inline and remote configurations:

### Vector Search
Vector search uses semantic similarity to find the most relevant chunks based on embedding vectors. This is the default search mode and works well for finding conceptually similar content.

```python
# Vector search example
search_response = client.vector_stores.search(
    vector_store_id=vector_store.id,
    query="What is machine learning?",
    search_mode="vector",
    max_num_results=5,
)
```

### Keyword Search
Keyword search uses traditional text-based matching to find chunks containing specific terms or phrases. This is useful when you need exact term matches.

```python
# Keyword search example
search_response = client.vector_stores.search(
    vector_store_id=vector_store.id,
    query="Python programming language",
    search_mode="keyword",
    max_num_results=5,
)
```

### Hybrid Search
Hybrid search combines both vector and keyword search methods to provide more comprehensive results. It leverages the strengths of both semantic similarity and exact term matching.

#### Basic Hybrid Search
```python
# Basic hybrid search example (uses RRF ranker with default impact_factor=60.0)
search_response = client.vector_stores.search(
    vector_store_id=vector_store.id,
    query="neural networks in Python",
    search_mode="hybrid",
    max_num_results=5,
)
```

**Note**: The default `impact_factor` value of 60.0 was empirically determined to be optimal in the original RRF research paper: ["Reciprocal Rank Fusion outperforms Condorcet and individual Rank Learning Methods"](https://plg.uwaterloo.ca/~gvcormac/cormacksigir09-rrf.pdf) (Cormack et al., 2009).

#### Hybrid Search with RRF (Reciprocal Rank Fusion) Ranker
RRF combines rankings from vector and keyword search by using reciprocal ranks. The impact factor controls how much weight is given to higher-ranked results.

```python
# Hybrid search with custom RRF parameters
search_response = client.vector_stores.search(
    vector_store_id=vector_store.id,
    query="neural networks in Python",
    search_mode="hybrid",
    max_num_results=5,
    ranking_options={
        "ranker": {
            "type": "rrf",
            "impact_factor": 100.0,  # Higher values give more weight to top-ranked results
        }
    },
)
```
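
To make the `impact_factor` concrete: RRF scores each chunk by summing the reciprocal of its rank in every result list, offset by the impact factor. A rough sketch of the idea, not Milvus's internal implementation (ranks here are 1-based and hypothetical):

```python
def rrf_score(ranks: list[int], impact_factor: float = 60.0) -> float:
    """Reciprocal Rank Fusion: sum 1/(impact_factor + rank) over each ranking the chunk appears in."""
    return sum(1.0 / (impact_factor + rank) for rank in ranks)


# A chunk ranked 1st by vector search and 3rd by keyword search:
print(rrf_score([1, 3]))                       # ~0.0323 with the default impact_factor
print(rrf_score([1, 3], impact_factor=100.0))  # ~0.0196
```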

#### Hybrid Search with Weighted Ranker
Weighted ranker linearly combines normalized scores from vector and keyword search. The alpha parameter controls the balance between the two search methods.

```python
# Hybrid search with weighted ranker
search_response = client.vector_stores.search(
    vector_store_id=vector_store.id,
    query="neural networks in Python",
    search_mode="hybrid",
    max_num_results=5,
    ranking_options={
        "ranker": {
            "type": "weighted",
            "alpha": 0.7,  # 70% vector search, 30% keyword search
        }
    },
)
```
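
The weighted ranker reduces to a simple linear blend of the two normalized scores. A minimal sketch of that combination (illustrative only; Milvus normalizes the scores internally):

```python
def weighted_score(vector_score: float, keyword_score: float, alpha: float = 0.7) -> float:
    """Linear fusion of normalized scores: alpha weights the vector side, (1 - alpha) the keyword side."""
    return alpha * vector_score + (1.0 - alpha) * keyword_score


# With alpha=0.7, a chunk scoring 0.9 on vector similarity and 0.2 on keyword match:
print(weighted_score(0.9, 0.2))  # 0.69
```

`alpha=1.0` falls back to pure vector ranking and `alpha=0.0` to pure keyword ranking.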

For detailed documentation on RRF and Weighted rankers, please refer to the [Milvus Reranking Guide](https://milvus.io/docs/reranking.md).

## Documentation
See the [Milvus documentation](https://milvus.io/docs/install-overview.md) for more details about Milvus in general.
@@ -129,7 +129,7 @@ docker run \
If you've set up your local development environment, you can also build the image using your local virtual environment.

```bash
-INFERENCE_MODEL=meta-llama/Llama-3.1-8b-Instruct
+INFERENCE_MODEL=meta-llama/Llama-3.1-8B-Instruct
llama stack build --distro nvidia --image-type venv
llama stack run ./run.yaml \
  --port 8321 \
@@ -174,7 +174,9 @@ class FaissIndex(EmbeddingIndex):
        k: int,
        score_threshold: float,
    ) -> QueryChunksResponse:
-        raise NotImplementedError("Keyword search is not supported in FAISS")
+        raise NotImplementedError(
+            "Keyword search is not supported - underlying DB FAISS does not support this search mode"
+        )

    async def query_hybrid(
        self,

@@ -185,7 +187,9 @@ class FaissIndex(EmbeddingIndex):
        reranker_type: str,
        reranker_params: dict[str, Any] | None = None,
    ) -> QueryChunksResponse:
-        raise NotImplementedError("Hybrid search is not supported in FAISS")
+        raise NotImplementedError(
+            "Hybrid search is not supported - underlying DB FAISS does not support this search mode"
+        )


class FaissVectorIOAdapter(OpenAIVectorStoreMixin, VectorIO, VectorDBsProtocolPrivate):
@@ -45,6 +45,18 @@ That means you'll get fast and efficient vector retrieval.

- Lightweight and easy to use
- Fully integrated with Llama Stack
- GPU support
- **Vector search** - FAISS supports pure vector similarity search using embeddings

## Search Modes

**Supported:**
- **Vector Search** (`mode="vector"`): Performs vector similarity search using embeddings

**Not Supported:**
- **Keyword Search** (`mode="keyword"`): Not supported by FAISS
- **Hybrid Search** (`mode="hybrid"`): Not supported by FAISS

> **Note**: FAISS is designed as a pure vector similarity search library. See the [FAISS GitHub repository](https://github.com/facebookresearch/faiss) for more details about FAISS's core functionality.
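
As the provider change above shows, requesting keyword or hybrid mode against FAISS raises `NotImplementedError`. A caller that wants to work across providers could fall back to vector mode. This is a hypothetical pattern for an in-process (library) deployment, not an API guarantee:

```python
# Hypothetical helper: prefer hybrid search, but fall back to plain vector search
# for providers such as FAISS that do not implement keyword/hybrid modes.
# Note: against a remote server the failure surfaces as an HTTP error instead of
# NotImplementedError, so the exception type would need adjusting.
def search_with_fallback(client, vector_store_id: str, query: str, max_num_results: int = 5):
    for mode in ("hybrid", "vector"):
        try:
            return client.vector_stores.search(
                vector_store_id=vector_store_id,
                query=query,
                search_mode=mode,
                max_num_results=max_num_results,
            )
        except NotImplementedError:
            continue  # provider does not support this mode; try the next one
```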

## Usage
@@ -535,6 +547,7 @@ That means you're not limited to storing vectors in memory or in a separate serv

- Easy to use
- Fully integrated with Llama Stack
- Supports all search modes: vector, keyword, and hybrid search (both inline and remote configurations)

## Usage

@@ -625,6 +638,92 @@ vector_io:
- **`client_pem_path`**: Path to the **client certificate** file (required for mTLS).
- **`client_key_path`**: Path to the **client private key** file (required for mTLS).

## Search Modes

Milvus supports three different search modes for both inline and remote configurations:

### Vector Search
Vector search uses semantic similarity to find the most relevant chunks based on embedding vectors. This is the default search mode and works well for finding conceptually similar content.

```python
# Vector search example
search_response = client.vector_stores.search(
    vector_store_id=vector_store.id,
    query="What is machine learning?",
    search_mode="vector",
    max_num_results=5,
)
```

### Keyword Search
Keyword search uses traditional text-based matching to find chunks containing specific terms or phrases. This is useful when you need exact term matches.

```python
# Keyword search example
search_response = client.vector_stores.search(
    vector_store_id=vector_store.id,
    query="Python programming language",
    search_mode="keyword",
    max_num_results=5,
)
```

### Hybrid Search
Hybrid search combines both vector and keyword search methods to provide more comprehensive results. It leverages the strengths of both semantic similarity and exact term matching.

#### Basic Hybrid Search
```python
# Basic hybrid search example (uses RRF ranker with default impact_factor=60.0)
search_response = client.vector_stores.search(
    vector_store_id=vector_store.id,
    query="neural networks in Python",
    search_mode="hybrid",
    max_num_results=5,
)
```

**Note**: The default `impact_factor` value of 60.0 was empirically determined to be optimal in the original RRF research paper: ["Reciprocal Rank Fusion outperforms Condorcet and individual Rank Learning Methods"](https://plg.uwaterloo.ca/~gvcormac/cormacksigir09-rrf.pdf) (Cormack et al., 2009).

#### Hybrid Search with RRF (Reciprocal Rank Fusion) Ranker
RRF combines rankings from vector and keyword search by using reciprocal ranks. The impact factor controls how much weight is given to higher-ranked results.

```python
# Hybrid search with custom RRF parameters
search_response = client.vector_stores.search(
    vector_store_id=vector_store.id,
    query="neural networks in Python",
    search_mode="hybrid",
    max_num_results=5,
    ranking_options={
        "ranker": {
            "type": "rrf",
            "impact_factor": 100.0,  # Higher values give more weight to top-ranked results
        }
    },
)
```

#### Hybrid Search with Weighted Ranker
Weighted ranker linearly combines normalized scores from vector and keyword search. The alpha parameter controls the balance between the two search methods.

```python
# Hybrid search with weighted ranker
search_response = client.vector_stores.search(
    vector_store_id=vector_store.id,
    query="neural networks in Python",
    search_mode="hybrid",
    max_num_results=5,
    ranking_options={
        "ranker": {
            "type": "weighted",
            "alpha": 0.7,  # 70% vector search, 30% keyword search
        }
    },
)
```

For detailed documentation on RRF and Weighted rankers, please refer to the [Milvus Reranking Guide](https://milvus.io/docs/reranking.md).

## Documentation
See the [Milvus documentation](https://milvus.io/docs/install-overview.md) for more details about Milvus in general.
@@ -13,7 +13,9 @@ LLM_MODEL_IDS = [
    "gemini-1.5-flash",
    "gemini-1.5-pro",
    "gemini-2.0-flash",
    "gemini-2.0-flash-lite",
    "gemini-2.5-flash",
    "gemini-2.5-flash-lite",
    "gemini-2.5-pro",
]
@@ -42,8 +42,8 @@ client.initialize()
### Create Completion

```python
-response = client.completion(
-    model_id="meta-llama/Llama-3.1-8b-Instruct",
+response = client.inference.completion(
+    model_id="meta-llama/Llama-3.1-8B-Instruct",
    content="Complete the sentence using one word: Roses are red, violets are :",
    stream=False,
    sampling_params={

@@ -56,8 +56,8 @@ print(f"Response: {response.content}")
### Create Chat Completion

```python
-response = client.chat_completion(
-    model_id="meta-llama/Llama-3.1-8b-Instruct",
+response = client.inference.chat_completion(
+    model_id="meta-llama/Llama-3.1-8B-Instruct",
    messages=[
        {
            "role": "system",

@@ -78,8 +78,10 @@ print(f"Response: {response.completion_message.content}")

### Create Embeddings
```python
-response = client.embeddings(
-    model_id="meta-llama/Llama-3.1-8b-Instruct", contents=["foo", "bar", "baz"]
+response = client.inference.embeddings(
+    model_id="nvidia/llama-3.2-nv-embedqa-1b-v2",
+    contents=["What is the capital of France?"],
+    task_type="query",
)
print(f"Embeddings: {response.embeddings}")
```
@@ -9,7 +9,9 @@ import contextvars
import logging
import queue
import random
import sys
import threading
import time
from collections.abc import Callable
from datetime import UTC, datetime
from functools import wraps

@@ -30,6 +32,16 @@ from llama_stack.providers.utils.telemetry.trace_protocol import serialize_value

logger = get_logger(__name__, category="core")

# Fallback logger that does NOT propagate to TelemetryHandler to avoid recursion
_fallback_logger = logging.getLogger("llama_stack.telemetry.background")
if not _fallback_logger.handlers:
    _fallback_logger.propagate = False
    _fallback_logger.setLevel(logging.ERROR)
    _fallback_handler = logging.StreamHandler(sys.stderr)
    _fallback_handler.setLevel(logging.ERROR)
    _fallback_handler.setFormatter(logging.Formatter("%(asctime)s [%(levelname)s] %(name)s: %(message)s"))
    _fallback_logger.addHandler(_fallback_handler)


INVALID_SPAN_ID = 0x0000000000000000
INVALID_TRACE_ID = 0x00000000000000000000000000000000

@@ -79,19 +91,32 @@ def generate_trace_id() -> str:
CURRENT_TRACE_CONTEXT = contextvars.ContextVar("trace_context", default=None)
BACKGROUND_LOGGER = None

LOG_QUEUE_FULL_LOG_INTERVAL_SECONDS = 60.0


class BackgroundLogger:
    def __init__(self, api: Telemetry, capacity: int = 100000):
        self.api = api
-        self.log_queue = queue.Queue(maxsize=capacity)
+        self.log_queue: queue.Queue[Any] = queue.Queue(maxsize=capacity)
        self.worker_thread = threading.Thread(target=self._process_logs, daemon=True)
        self.worker_thread.start()
        self._last_queue_full_log_time: float = 0.0
        self._dropped_since_last_notice: int = 0

    def log_event(self, event):
        try:
            self.log_queue.put_nowait(event)
        except queue.Full:
-            logger.error("Log queue is full, dropping event")
+            # Aggregate drops and emit at most once per interval via fallback logger
+            self._dropped_since_last_notice += 1
+            current_time = time.time()
+            if current_time - self._last_queue_full_log_time >= LOG_QUEUE_FULL_LOG_INTERVAL_SECONDS:
+                _fallback_logger.error(
+                    "Log queue is full; dropped %d events since last notice",
+                    self._dropped_since_last_notice,
+                )
+                self._last_queue_full_log_time = current_time
+                self._dropped_since_last_notice = 0

    def _process_logs(self):
        while True:
@ -0,0 +1,383 @@
|
|||
"use client";
|
||||
|
||||
import { useEffect, useState } from "react";
|
||||
import { useParams, useRouter } from "next/navigation";
|
||||
import { useAuthClient } from "@/hooks/use-auth-client";
|
||||
import { ContentsAPI, VectorStoreContentItem } from "@/lib/contents-api";
|
||||
import type { VectorStore } from "llama-stack-client/resources/vector-stores/vector-stores";
|
||||
import type { VectorStoreFile } from "llama-stack-client/resources/vector-stores/files";
|
||||
import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import { Input } from "@/components/ui/input";
|
||||
import { Edit, Save, X, Trash2 } from "lucide-react";
|
||||
import {
|
||||
DetailLoadingView,
|
||||
DetailErrorView,
|
||||
DetailNotFoundView,
|
||||
DetailLayout,
|
||||
PropertiesCard,
|
||||
PropertyItem,
|
||||
} from "@/components/layout/detail-layout";
|
||||
import { PageBreadcrumb, BreadcrumbSegment } from "@/components/layout/page-breadcrumb";
|
||||
|
||||
export default function ContentDetailPage() {
|
||||
const params = useParams();
|
||||
const router = useRouter();
|
||||
const vectorStoreId = params.id as string;
|
||||
const fileId = params.fileId as string;
|
||||
const contentId = params.contentId as string;
|
||||
const client = useAuthClient();
|
||||
|
||||
const getTextFromContent = (content: any): string => {
|
||||
if (typeof content === 'string') {
|
||||
return content;
|
||||
} else if (content && content.type === 'text') {
|
||||
return content.text;
|
||||
}
|
||||
return '';
|
||||
};
|
||||
|
||||
const [store, setStore] = useState<VectorStore | null>(null);
|
||||
const [file, setFile] = useState<VectorStoreFile | null>(null);
|
||||
const [content, setContent] = useState<VectorStoreContentItem | null>(null);
|
||||
const [isLoading, setIsLoading] = useState(true);
|
||||
const [error, setError] = useState<Error | null>(null);
|
||||
const [isEditing, setIsEditing] = useState(false);
|
||||
const [editedContent, setEditedContent] = useState("");
|
||||
const [editedMetadata, setEditedMetadata] = useState<Record<string, any>>({});
|
||||
const [isEditingEmbedding, setIsEditingEmbedding] = useState(false);
|
||||
const [editedEmbedding, setEditedEmbedding] = useState<number[]>([]);
|
||||
|
||||
useEffect(() => {
|
||||
if (!vectorStoreId || !fileId || !contentId) return;
|
||||
|
||||
const fetchData = async () => {
|
||||
setIsLoading(true);
|
||||
setError(null);
|
||||
try {
|
||||
const [storeResponse, fileResponse] = await Promise.all([
|
||||
client.vectorStores.retrieve(vectorStoreId),
|
||||
client.vectorStores.files.retrieve(vectorStoreId, fileId),
|
||||
]);
|
||||
|
||||
setStore(storeResponse as VectorStore);
|
||||
setFile(fileResponse as VectorStoreFile);
|
||||
|
||||
const contentsAPI = new ContentsAPI(client);
|
||||
const contentsResponse = await contentsAPI.listContents(vectorStoreId, fileId);
|
||||
const targetContent = contentsResponse.data.find(c => c.id === contentId);
|
||||
|
||||
if (targetContent) {
|
||||
setContent(targetContent);
|
||||
setEditedContent(getTextFromContent(targetContent.content));
|
||||
setEditedMetadata({ ...targetContent.metadata });
|
||||
setEditedEmbedding(targetContent.embedding || []);
|
||||
} else {
|
||||
throw new Error(`Content ${contentId} not found`);
|
||||
}
|
||||
} catch (err) {
|
||||
setError(err instanceof Error ? err : new Error("Failed to load content."));
|
||||
} finally {
|
||||
setIsLoading(false);
|
||||
}
|
||||
};
|
||||
fetchData();
|
||||
}, [vectorStoreId, fileId, contentId, client]);
|
||||
|
||||
const handleSave = async () => {
|
||||
if (!content) return;
|
||||
|
||||
try {
|
||||
const updates: { content?: string; metadata?: Record<string, any> } = {};
|
||||
|
||||
if (editedContent !== getTextFromContent(content.content)) {
|
||||
updates.content = editedContent;
|
||||
}
|
||||
|
||||
if (JSON.stringify(editedMetadata) !== JSON.stringify(content.metadata)) {
|
||||
updates.metadata = editedMetadata;
|
||||
}
|
||||
|
||||
if (Object.keys(updates).length > 0) {
|
||||
const contentsAPI = new ContentsAPI(client);
|
||||
const updatedContent = await contentsAPI.updateContent(vectorStoreId, fileId, contentId, updates);
|
||||
setContent(updatedContent);
|
||||
}
|
||||
|
||||
setIsEditing(false);
|
||||
} catch (err) {
|
||||
console.error('Failed to update content:', err);
|
||||
}
|
||||
};
|
||||
|
||||
const handleDelete = async () => {
|
||||
if (!confirm('Are you sure you want to delete this content?')) return;
|
||||
|
||||
try {
|
||||
const contentsAPI = new ContentsAPI(client);
|
||||
await contentsAPI.deleteContent(vectorStoreId, fileId, contentId);
|
||||
router.push(`/logs/vector-stores/${vectorStoreId}/files/${fileId}/contents`);
|
||||
} catch (err) {
|
||||
console.error('Failed to delete content:', err);
|
||||
}
|
||||
};
|
||||
|
||||
const handleCancel = () => {
|
||||
setEditedContent(content ? getTextFromContent(content.content) : "");
|
||||
setEditedMetadata({ ...content?.metadata });
|
||||
setEditedEmbedding(content?.embedding || []);
|
||||
setIsEditing(false);
|
||||
setIsEditingEmbedding(false);
|
||||
};
|
||||
|
||||
const title = `Content: ${contentId}`;
|
||||
|
||||
const breadcrumbSegments: BreadcrumbSegment[] = [
|
||||
{ label: "Vector Stores", href: "/logs/vector-stores" },
|
||||
{ label: store?.name || vectorStoreId, href: `/logs/vector-stores/${vectorStoreId}` },
|
||||
{ label: "Files", href: `/logs/vector-stores/${vectorStoreId}` },
|
||||
{ label: fileId, href: `/logs/vector-stores/${vectorStoreId}/files/${fileId}` },
|
||||
{ label: "Contents", href: `/logs/vector-stores/${vectorStoreId}/files/${fileId}/contents` },
|
||||
{ label: contentId },
|
||||
];
|
||||
|
||||
if (error) {
|
||||
return <DetailErrorView title={title} id={contentId} error={error} />;
|
||||
}
|
||||
if (isLoading) {
|
||||
return <DetailLoadingView title={title} />;
|
||||
}
|
||||
if (!content) {
|
||||
return <DetailNotFoundView title={title} id={contentId} />;
|
||||
}
|
||||
|
||||
const mainContent = (
|
||||
<>
|
||||
<Card>
|
||||
<CardHeader className="flex flex-row items-center justify-between">
|
||||
<CardTitle>Content</CardTitle>
|
||||
<div className="flex gap-2">
|
||||
{isEditing ? (
|
||||
<>
|
||||
<Button size="sm" onClick={handleSave}>
|
||||
<Save className="h-4 w-4 mr-1" />
|
||||
Save
|
||||
</Button>
|
||||
<Button size="sm" variant="outline" onClick={handleCancel}>
|
||||
<X className="h-4 w-4 mr-1" />
|
||||
Cancel
|
||||
</Button>
|
||||
</>
|
||||
) : (
|
||||
<>
|
||||
<Button size="sm" onClick={() => setIsEditing(true)}>
|
||||
<Edit className="h-4 w-4 mr-1" />
|
||||
Edit
|
||||
</Button>
|
||||
<Button size="sm" variant="destructive" onClick={handleDelete}>
|
||||
<Trash2 className="h-4 w-4 mr-1" />
|
||||
Delete
|
||||
</Button>
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
</CardHeader>
|
||||
<CardContent>
|
||||
{isEditing ? (
|
||||
<textarea
|
||||
value={editedContent}
|
||||
onChange={(e) => setEditedContent(e.target.value)}
|
||||
className="w-full h-64 p-3 border rounded-md resize-none font-mono text-sm"
|
||||
placeholder="Enter content..."
|
||||
/>
|
||||
) : (
|
||||
<div className="p-3 bg-gray-50 dark:bg-gray-800 rounded-md">
|
||||
<pre className="whitespace-pre-wrap font-mono text-sm text-gray-900 dark:text-gray-100">
|
||||
{getTextFromContent(content.content)}
|
||||
</pre>
|
||||
</div>
|
||||
)}
|
||||
</CardContent>
|
||||
</Card>
|
||||
|
||||
<Card>
|
||||
<CardHeader className="flex flex-row items-center justify-between">
|
||||
<CardTitle>Content Embedding</CardTitle>
|
||||
<div className="flex gap-2">
|
||||
{isEditingEmbedding ? (
|
||||
<>
|
||||
<Button size="sm" onClick={() => {
|
||||
setIsEditingEmbedding(false);
|
||||
}}>
|
||||
<Save className="h-4 w-4 mr-1" />
|
||||
Save
|
||||
</Button>
|
||||
<Button size="sm" variant="outline" onClick={() => {
|
||||
setEditedEmbedding(content?.embedding || []);
|
||||
setIsEditingEmbedding(false);
|
||||
}}>
|
||||
<X className="h-4 w-4 mr-1" />
|
||||
Cancel
|
||||
</Button>
|
||||
</>
|
||||
) : (
|
||||
<Button size="sm" onClick={() => setIsEditingEmbedding(true)}>
|
||||
<Edit className="h-4 w-4 mr-1" />
|
||||
Edit
|
||||
</Button>
|
||||
)}
|
||||
</div>
|
||||
</CardHeader>
|
||||
<CardContent>
|
||||
{content?.embedding && content.embedding.length > 0 ? (
|
||||
isEditingEmbedding ? (
|
||||
<div className="space-y-2">
|
||||
<p className="text-sm text-gray-600 dark:text-gray-400">
|
||||
Embedding ({editedEmbedding.length}D vector):
|
||||
</p>
|
||||
<textarea
|
||||
value={JSON.stringify(editedEmbedding, null, 2)}
|
||||
onChange={(e) => {
|
||||
try {
|
||||
const parsed = JSON.parse(e.target.value);
|
||||
if (Array.isArray(parsed) && parsed.every(v => typeof v === 'number')) {
|
||||
setEditedEmbedding(parsed);
|
||||
}
|
||||
} catch {
// Ignore parse errors while the user is still editing the JSON array.
}
|
||||
}}
|
||||
className="w-full h-32 p-3 border rounded-md resize-none font-mono text-xs"
|
||||
placeholder="Enter embedding as JSON array..."
|
||||
/>
|
||||
</div>
|
||||
) : (
|
||||
<div className="space-y-2">
|
||||
<div className="flex items-center gap-2">
|
||||
<span className="font-mono text-xs bg-gray-100 dark:bg-gray-800 rounded px-2 py-1">
|
||||
{content.embedding.length}D vector
|
||||
</span>
|
||||
</div>
|
||||
<div className="p-3 bg-gray-50 dark:bg-gray-800 rounded-md max-h-32 overflow-y-auto">
|
||||
<pre className="whitespace-pre-wrap font-mono text-xs text-gray-900 dark:text-gray-100">
|
||||
[{content.embedding.slice(0, 20).map(v => v.toFixed(6)).join(', ')}
|
||||
{content.embedding.length > 20 ? `\n... and ${content.embedding.length - 20} more values` : ''}]
|
||||
</pre>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
) : (
|
||||
<p className="text-gray-500 italic text-sm">
|
||||
No embedding available for this content.
|
||||
</p>
|
||||
)}
|
||||
</CardContent>
|
||||
</Card>
|
||||
|
||||
<Card>
|
||||
<CardHeader>
|
||||
<CardTitle>Metadata</CardTitle>
|
||||
</CardHeader>
|
||||
<CardContent>
|
||||
{isEditing ? (
|
||||
<div className="space-y-2">
|
||||
{Object.entries(editedMetadata).map(([key, value]) => (
|
||||
<div key={key} className="flex gap-2">
|
||||
<Input
|
||||
value={key}
|
||||
onChange={(e) => {
|
||||
const newMetadata = { ...editedMetadata };
|
||||
delete newMetadata[key];
|
||||
newMetadata[e.target.value] = value;
|
||||
setEditedMetadata(newMetadata);
|
||||
}}
|
||||
placeholder="Key"
|
||||
className="flex-1"
|
||||
/>
|
||||
<Input
|
||||
value={typeof value === 'string' ? value : JSON.stringify(value)}
|
||||
onChange={(e) => {
|
||||
setEditedMetadata({
|
||||
...editedMetadata,
|
||||
[key]: e.target.value
|
||||
});
|
||||
}}
|
||||
placeholder="Value"
|
||||
className="flex-1"
|
||||
/>
|
||||
</div>
|
||||
))}
|
||||
<Button
|
||||
size="sm"
|
||||
variant="outline"
|
||||
onClick={() => {
|
||||
setEditedMetadata({
|
||||
...editedMetadata,
|
||||
['']: ''
|
||||
});
|
||||
}}
|
||||
>
|
||||
Add Field
|
||||
</Button>
|
||||
</div>
|
||||
) : (
|
||||
<div className="space-y-2">
|
||||
{Object.entries(content.metadata).map(([key, value]) => (
|
||||
<div key={key} className="flex justify-between py-1">
|
||||
<span className="font-medium text-gray-600">{key}:</span>
|
||||
<span className="font-mono text-sm">
|
||||
{typeof value === 'string' ? value : JSON.stringify(value)}
|
||||
</span>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
)}
|
||||
</CardContent>
|
||||
</Card>
|
||||
</>
|
||||
);
|
||||
|
||||
const sidebar = (
|
||||
<PropertiesCard>
|
||||
<PropertyItem label="Content ID" value={contentId} />
|
||||
<PropertyItem label="File ID" value={fileId} />
|
||||
<PropertyItem label="Vector Store ID" value={vectorStoreId} />
|
||||
<PropertyItem label="Object Type" value={content.object} />
|
||||
<PropertyItem
|
||||
label="Created"
|
||||
value={new Date(content.created_timestamp * 1000).toLocaleString()}
|
||||
/>
|
||||
<PropertyItem
|
||||
label="Content Length"
|
||||
value={`${getTextFromContent(content.content).length} chars`}
|
||||
/>
|
||||
{content.metadata.chunk_window && (
|
||||
<PropertyItem
|
||||
label="Position"
|
||||
value={content.metadata.chunk_window}
|
||||
/>
|
||||
)}
|
||||
{file && (
|
||||
<>
|
||||
<PropertyItem label="File Status" value={file.status} />
|
||||
<PropertyItem label="File Usage" value={`${file.usage_bytes} bytes`} />
|
||||
</>
|
||||
)}
|
||||
{store && (
|
||||
<>
|
||||
<PropertyItem label="Store Name" value={store.name || ""} />
|
||||
<PropertyItem
|
||||
label="Provider ID"
|
||||
value={(store.metadata.provider_id as string) || ""}
|
||||
/>
|
||||
</>
|
||||
)}
|
||||
</PropertiesCard>
|
||||
);
|
||||
|
||||
return (
|
||||
<>
|
||||
<PageBreadcrumb segments={breadcrumbSegments} />
|
||||
<DetailLayout title={title} mainContent={mainContent} sidebar={sidebar} />
|
||||
</>
|
||||
);
|
||||
}
|
|
@ -0,0 +1,297 @@
|
|||
"use client";
|
||||
|
||||
import { useEffect, useState } from "react";
|
||||
import { useParams, useRouter } from "next/navigation";
|
||||
import { useAuthClient } from "@/hooks/use-auth-client";
|
||||
import { ContentsAPI, VectorStoreContentItem } from "@/lib/contents-api";
|
||||
import type { VectorStore } from "llama-stack-client/resources/vector-stores/vector-stores";
|
||||
import type { VectorStoreFile } from "llama-stack-client/resources/vector-stores/files";
|
||||
import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card";
|
||||
import { Skeleton } from "@/components/ui/skeleton";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import { Edit, Trash2, Eye } from "lucide-react";
|
||||
import {
|
||||
DetailLoadingView,
|
||||
DetailErrorView,
|
||||
DetailNotFoundView,
|
||||
DetailLayout,
|
||||
PropertiesCard,
|
||||
PropertyItem,
|
||||
} from "@/components/layout/detail-layout";
|
||||
import { PageBreadcrumb, BreadcrumbSegment } from "@/components/layout/page-breadcrumb";
|
||||
import {
|
||||
Table,
|
||||
TableBody,
|
||||
TableCaption,
|
||||
TableCell,
|
||||
TableHead,
|
||||
TableHeader,
|
||||
TableRow,
|
||||
} from "@/components/ui/table";
|
||||
|
||||
export default function ContentsListPage() {
|
||||
const params = useParams();
|
||||
const router = useRouter();
|
||||
const vectorStoreId = params.id as string;
|
||||
const fileId = params.fileId as string;
|
||||
const client = useAuthClient();
|
||||
|
||||
const getTextFromContent = (content: any): string => {
|
||||
if (typeof content === 'string') {
|
||||
return content;
|
||||
} else if (content && content.type === 'text') {
|
||||
return content.text;
|
||||
}
|
||||
return '';
|
||||
};
|
||||
|
||||
const [store, setStore] = useState<VectorStore | null>(null);
|
||||
const [file, setFile] = useState<VectorStoreFile | null>(null);
|
||||
const [contents, setContents] = useState<VectorStoreContentItem[]>([]);
|
||||
const [isLoadingStore, setIsLoadingStore] = useState(true);
|
||||
const [isLoadingFile, setIsLoadingFile] = useState(true);
|
||||
const [isLoadingContents, setIsLoadingContents] = useState(true);
|
||||
const [errorStore, setErrorStore] = useState<Error | null>(null);
|
||||
const [errorFile, setErrorFile] = useState<Error | null>(null);
|
||||
const [errorContents, setErrorContents] = useState<Error | null>(null);
|
||||
|
||||
useEffect(() => {
|
||||
if (!vectorStoreId) return;
|
||||
|
||||
const fetchStore = async () => {
|
||||
setIsLoadingStore(true);
|
||||
setErrorStore(null);
|
||||
try {
|
||||
const response = await client.vectorStores.retrieve(vectorStoreId);
|
||||
setStore(response as VectorStore);
|
||||
} catch (err) {
|
||||
setErrorStore(err instanceof Error ? err : new Error("Failed to load vector store."));
|
||||
} finally {
|
||||
setIsLoadingStore(false);
|
||||
}
|
||||
};
|
||||
fetchStore();
|
||||
}, [vectorStoreId, client]);
|
||||
|
||||
useEffect(() => {
|
||||
if (!vectorStoreId || !fileId) return;
|
||||
|
||||
const fetchFile = async () => {
|
||||
setIsLoadingFile(true);
|
||||
setErrorFile(null);
|
||||
try {
|
||||
const response = await client.vectorStores.files.retrieve(vectorStoreId, fileId);
|
||||
setFile(response as VectorStoreFile);
|
||||
} catch (err) {
|
||||
setErrorFile(err instanceof Error ? err : new Error("Failed to load file."));
|
||||
} finally {
|
||||
setIsLoadingFile(false);
|
||||
}
|
||||
};
|
||||
fetchFile();
|
||||
}, [vectorStoreId, fileId, client]);
|
||||
|
||||
useEffect(() => {
|
||||
if (!vectorStoreId || !fileId) return;
|
||||
|
||||
const fetchContents = async () => {
|
||||
setIsLoadingContents(true);
|
||||
setErrorContents(null);
|
||||
try {
|
||||
const contentsAPI = new ContentsAPI(client);
|
||||
const contentsResponse = await contentsAPI.listContents(vectorStoreId, fileId, { limit: 100 });
|
||||
setContents(contentsResponse.data);
|
||||
} catch (err) {
|
||||
setErrorContents(err instanceof Error ? err : new Error("Failed to load contents."));
|
||||
} finally {
|
||||
setIsLoadingContents(false);
|
||||
}
|
||||
};
|
||||
fetchContents();
|
||||
}, [vectorStoreId, fileId, client]);
|
||||
|
||||
const handleDeleteContent = async (contentId: string) => {
|
||||
try {
|
||||
const contentsAPI = new ContentsAPI(client);
|
||||
await contentsAPI.deleteContent(vectorStoreId, fileId, contentId);
|
||||
setContents(contents.filter(content => content.id !== contentId));
|
||||
} catch (err) {
|
||||
console.error('Failed to delete content:', err);
|
||||
}
|
||||
};
|
||||
|
||||
const handleViewContent = (contentId: string) => {
|
||||
router.push(`/logs/vector-stores/${vectorStoreId}/files/${fileId}/contents/${contentId}`);
|
||||
};
|
||||
|
||||
const title = `Contents in File: ${fileId}`;
|
||||
|
||||
const breadcrumbSegments: BreadcrumbSegment[] = [
|
||||
{ label: "Vector Stores", href: "/logs/vector-stores" },
|
||||
{ label: store?.name || vectorStoreId, href: `/logs/vector-stores/${vectorStoreId}` },
|
||||
{ label: "Files", href: `/logs/vector-stores/${vectorStoreId}` },
|
||||
{ label: fileId, href: `/logs/vector-stores/${vectorStoreId}/files/${fileId}` },
|
||||
{ label: "Contents" },
|
||||
];
|
||||
|
||||
if (errorStore) {
|
||||
return <DetailErrorView title={title} id={vectorStoreId} error={errorStore} />;
|
||||
}
|
||||
if (isLoadingStore) {
|
||||
return <DetailLoadingView title={title} />;
|
||||
}
|
||||
if (!store) {
|
||||
return <DetailNotFoundView title={title} id={vectorStoreId} />;
|
||||
}
|
||||
|
||||
const mainContent = (
|
||||
<>
|
||||
<Card>
|
||||
<CardHeader>
|
||||
<CardTitle>Content Chunks ({contents.length})</CardTitle>
|
||||
</CardHeader>
|
||||
<CardContent>
|
||||
{isLoadingContents ? (
|
||||
<div className="space-y-2">
|
||||
<Skeleton className="h-4 w-full" />
|
||||
<Skeleton className="h-4 w-3/4" />
|
||||
<Skeleton className="h-4 w-1/2" />
|
||||
</div>
|
||||
) : errorContents ? (
|
||||
<div className="text-destructive text-sm">
|
||||
Error loading contents: {errorContents.message}
|
||||
</div>
|
||||
) : contents.length > 0 ? (
|
||||
<Table>
|
||||
<TableCaption>Contents in this file</TableCaption>
|
||||
<TableHeader>
|
||||
<TableRow>
|
||||
<TableHead>Content ID</TableHead>
|
||||
<TableHead>Content Preview</TableHead>
|
||||
<TableHead>Embedding</TableHead>
|
||||
<TableHead>Position</TableHead>
|
||||
<TableHead>Created</TableHead>
|
||||
<TableHead>Actions</TableHead>
|
||||
</TableRow>
|
||||
</TableHeader>
|
||||
<TableBody>
|
||||
{contents.map((content) => (
|
||||
<TableRow key={content.id}>
|
||||
<TableCell className="font-mono text-xs">
|
||||
<Button
|
||||
variant="link"
|
||||
className="p-0 h-auto font-mono text-xs text-blue-600 hover:text-blue-800 dark:text-blue-400 dark:hover:text-blue-300"
|
||||
onClick={() => handleViewContent(content.id)}
|
||||
title={content.id}
|
||||
>
|
||||
{content.id.substring(0, 10)}...
|
||||
</Button>
|
||||
</TableCell>
|
||||
<TableCell>
|
||||
<div className="max-w-md">
|
||||
<p className="text-sm truncate" title={getTextFromContent(content.content)}>
|
||||
{getTextFromContent(content.content)}
|
||||
</p>
|
||||
</div>
|
||||
</TableCell>
|
||||
<TableCell className="text-xs text-gray-500">
|
||||
{content.embedding && content.embedding.length > 0 ? (
|
||||
<div className="max-w-xs">
|
||||
<span className="font-mono text-xs bg-gray-100 dark:bg-gray-800 rounded px-1 py-0.5" title={`${content.embedding.length}D vector: [${content.embedding.slice(0, 3).map(v => v.toFixed(3)).join(', ')}...]`}>
|
||||
[{content.embedding.slice(0, 3).map(v => v.toFixed(3)).join(', ')}...] ({content.embedding.length}D)
|
||||
</span>
|
||||
</div>
|
||||
) : (
|
||||
<span className="text-gray-400 dark:text-gray-500 italic">No embedding</span>
|
||||
)}
|
||||
</TableCell>
|
||||
<TableCell className="text-xs text-gray-500">
|
||||
{content.metadata.chunk_window
|
||||
? content.metadata.chunk_window
|
||||
: `${content.metadata.content_length || 0} chars`}
|
||||
</TableCell>
|
||||
<TableCell className="text-xs">
|
||||
{new Date(content.created_timestamp * 1000).toLocaleString()}
|
||||
</TableCell>
|
||||
<TableCell>
|
||||
<div className="flex gap-1">
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="sm"
|
||||
className="h-6 w-6 p-0"
|
||||
title="View content details"
|
||||
onClick={() => handleViewContent(content.id)}
|
||||
>
|
||||
<Eye className="h-3 w-3" />
|
||||
</Button>
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="sm"
|
||||
className="h-6 w-6 p-0"
|
||||
title="Edit content"
|
||||
onClick={() => handleViewContent(content.id)}
|
||||
>
|
||||
<Edit className="h-3 w-3" />
|
||||
</Button>
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="sm"
|
||||
className="h-6 w-6 p-0 text-destructive hover:text-destructive"
|
||||
title="Delete content"
|
||||
onClick={() => handleDeleteContent(content.id)}
|
||||
>
|
||||
<Trash2 className="h-3 w-3" />
|
||||
</Button>
|
||||
</div>
|
||||
</TableCell>
|
||||
</TableRow>
|
||||
))}
|
||||
</TableBody>
|
||||
</Table>
|
||||
) : (
|
||||
<p className="text-gray-500 italic text-sm">
|
||||
No contents found for this file.
|
||||
</p>
|
||||
)}
|
||||
</CardContent>
|
||||
</Card>
|
||||
</>
|
||||
);
|
||||
|
||||
const sidebar = (
|
||||
<PropertiesCard>
|
||||
<PropertyItem label="File ID" value={fileId} />
|
||||
<PropertyItem label="Vector Store ID" value={vectorStoreId} />
|
||||
{file && (
|
||||
<>
|
||||
<PropertyItem label="Status" value={file.status} />
|
||||
<PropertyItem
|
||||
label="Created"
|
||||
value={new Date(file.created_at * 1000).toLocaleString()}
|
||||
/>
|
||||
<PropertyItem label="Usage Bytes" value={file.usage_bytes} />
|
||||
<PropertyItem
|
||||
label="Chunking Strategy"
|
||||
value={file.chunking_strategy.type}
|
||||
/>
|
||||
</>
|
||||
)}
|
||||
{store && (
|
||||
<>
|
||||
<PropertyItem label="Store Name" value={store.name || ""} />
|
||||
<PropertyItem
|
||||
label="Provider ID"
|
||||
value={(store.metadata.provider_id as string) || ""}
|
||||
/>
|
||||
</>
|
||||
)}
|
||||
</PropertiesCard>
|
||||
);
|
||||
|
||||
return (
|
||||
<>
|
||||
<PageBreadcrumb segments={breadcrumbSegments} />
|
||||
<DetailLayout title={title} mainContent={mainContent} sidebar={sidebar} />
|
||||
</>
|
||||
);
|
||||
}
|
|
@ -0,0 +1,258 @@
|
|||
"use client";
|
||||
|
||||
import { useEffect, useState } from "react";
|
||||
import { useParams, useRouter } from "next/navigation";
|
||||
import { useAuthClient } from "@/hooks/use-auth-client";
|
||||
import type { VectorStore } from "llama-stack-client/resources/vector-stores/vector-stores";
|
||||
import type { VectorStoreFile, FileContentResponse } from "llama-stack-client/resources/vector-stores/files";
|
||||
import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card";
|
||||
import { Skeleton } from '@/components/ui/skeleton';
|
||||
import { Button } from "@/components/ui/button";
|
||||
import { List } from "lucide-react";
|
||||
import {
|
||||
DetailLoadingView,
|
||||
DetailErrorView,
|
||||
DetailNotFoundView,
|
||||
DetailLayout,
|
||||
PropertiesCard,
|
||||
PropertyItem,
|
||||
} from "@/components/layout/detail-layout";
|
||||
import { PageBreadcrumb, BreadcrumbSegment } from "@/components/layout/page-breadcrumb";
|
||||
|
||||
export default function FileDetailPage() {
|
||||
const params = useParams();
|
||||
const router = useRouter();
|
||||
const vectorStoreId = params.id as string;
|
||||
const fileId = params.fileId as string;
|
||||
const client = useAuthClient();
|
||||
|
||||
const [store, setStore] = useState<VectorStore | null>(null);
|
||||
const [file, setFile] = useState<VectorStoreFile | null>(null);
|
||||
const [contents, setContents] = useState<FileContentResponse | null>(null);
|
||||
const [isLoadingStore, setIsLoadingStore] = useState(true);
|
||||
const [isLoadingFile, setIsLoadingFile] = useState(true);
|
||||
const [isLoadingContents, setIsLoadingContents] = useState(true);
|
||||
const [errorStore, setErrorStore] = useState<Error | null>(null);
|
||||
const [errorFile, setErrorFile] = useState<Error | null>(null);
|
||||
const [errorContents, setErrorContents] = useState<Error | null>(null);
|
||||
|
||||
useEffect(() => {
|
||||
if (!vectorStoreId) return;
|
||||
|
||||
const fetchStore = async () => {
|
||||
setIsLoadingStore(true);
|
||||
setErrorStore(null);
|
||||
try {
|
||||
const response = await client.vectorStores.retrieve(vectorStoreId);
|
||||
setStore(response as VectorStore);
|
||||
} catch (err) {
|
||||
setErrorStore(err instanceof Error ? err : new Error("Failed to load vector store."));
|
||||
} finally {
|
||||
setIsLoadingStore(false);
|
||||
}
|
||||
};
|
||||
fetchStore();
|
||||
}, [vectorStoreId, client]);
|
||||
|
||||
useEffect(() => {
|
||||
if (!vectorStoreId || !fileId) return;
|
||||
|
||||
const fetchFile = async () => {
|
||||
setIsLoadingFile(true);
|
||||
setErrorFile(null);
|
||||
try {
|
||||
const response = await client.vectorStores.files.retrieve(vectorStoreId, fileId);
|
||||
setFile(response as VectorStoreFile);
|
||||
} catch (err) {
|
||||
setErrorFile(err instanceof Error ? err : new Error("Failed to load file."));
|
||||
} finally {
|
||||
setIsLoadingFile(false);
|
||||
}
|
||||
};
|
||||
fetchFile();
|
||||
}, [vectorStoreId, fileId, client]);
|
||||
|
||||
useEffect(() => {
|
||||
if (!vectorStoreId || !fileId) return;
|
||||
|
||||
const fetchContents = async () => {
|
||||
setIsLoadingContents(true);
|
||||
setErrorContents(null);
|
||||
try {
|
||||
const response = await client.vectorStores.files.content(vectorStoreId, fileId);
|
||||
setContents(response);
|
||||
} catch (err) {
|
||||
setErrorContents(err instanceof Error ? err : new Error("Failed to load contents."));
|
||||
} finally {
|
||||
setIsLoadingContents(false);
|
||||
}
|
||||
};
|
||||
fetchContents();
|
||||
}, [vectorStoreId, fileId, client]);
|
||||
|
||||
const handleViewContents = () => {
|
||||
router.push(`/logs/vector-stores/${vectorStoreId}/files/${fileId}/contents`);
|
||||
};
|
||||
|
||||
const title = `File: ${fileId}`;
|
||||
|
||||
const breadcrumbSegments: BreadcrumbSegment[] = [
|
||||
{ label: "Vector Stores", href: "/logs/vector-stores" },
|
||||
{ label: store?.name || vectorStoreId, href: `/logs/vector-stores/${vectorStoreId}` },
|
||||
{ label: "Files", href: `/logs/vector-stores/${vectorStoreId}` },
|
||||
{ label: fileId },
|
||||
];
|
||||
|
||||
if (errorStore) {
|
||||
return <DetailErrorView title={title} id={vectorStoreId} error={errorStore} />;
|
||||
}
|
||||
if (isLoadingStore) {
|
||||
return <DetailLoadingView title={title} />;
|
||||
}
|
||||
if (!store) {
|
||||
return <DetailNotFoundView title={title} id={vectorStoreId} />;
|
||||
}
|
||||
|
||||
const mainContent = (
|
||||
<>
|
||||
<Card>
|
||||
<CardHeader>
|
||||
<CardTitle>File Information</CardTitle>
|
||||
</CardHeader>
|
||||
<CardContent>
|
||||
{isLoadingFile ? (
|
||||
<div className="space-y-2">
|
||||
<Skeleton className="h-4 w-full" />
|
||||
<Skeleton className="h-4 w-3/4" />
|
||||
<Skeleton className="h-4 w-1/2" />
|
||||
</div>
|
||||
) : errorFile ? (
|
||||
<div className="text-destructive text-sm">
|
||||
Error loading file: {errorFile.message}
|
||||
</div>
|
||||
) : file ? (
|
||||
<div className="space-y-4">
|
||||
<div>
|
||||
<h3 className="text-lg font-medium mb-2">File Details</h3>
|
||||
<div className="grid grid-cols-2 gap-4 text-sm">
|
||||
<div>
|
||||
<span className="font-medium text-gray-600 dark:text-gray-400">Status:</span>
|
||||
<span className="ml-2">{file.status}</span>
|
||||
</div>
|
||||
<div>
|
||||
<span className="font-medium text-gray-600 dark:text-gray-400">Size:</span>
|
||||
<span className="ml-2">{file.usage_bytes} bytes</span>
|
||||
</div>
|
||||
<div>
|
||||
<span className="font-medium text-gray-600 dark:text-gray-400">Created:</span>
|
||||
<span className="ml-2">{new Date(file.created_at * 1000).toLocaleString()}</span>
|
||||
</div>
|
||||
<div>
|
||||
<span className="font-medium text-gray-600 dark:text-gray-400">Content Strategy:</span>
|
||||
<span className="ml-2">{file.chunking_strategy.type}</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="border-t pt-4">
|
||||
<h3 className="text-lg font-medium mb-3">Actions</h3>
|
||||
<Button
|
||||
onClick={handleViewContents}
|
||||
className="flex items-center gap-2 hover:bg-primary/90 dark:hover:bg-primary/80 hover:scale-105 transition-all duration-200"
|
||||
>
|
||||
<List className="h-4 w-4" />
|
||||
View Contents
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
) : (
|
||||
<p className="text-gray-500 italic text-sm">
|
||||
File not found.
|
||||
</p>
|
||||
)}
|
||||
</CardContent>
|
||||
</Card>
|
||||
|
||||
<Card>
|
||||
<CardHeader>
|
||||
<CardTitle>Content Summary</CardTitle>
|
||||
</CardHeader>
|
||||
<CardContent>
|
||||
{isLoadingContents ? (
|
||||
<div className="space-y-2">
|
||||
<Skeleton className="h-4 w-full" />
|
||||
<Skeleton className="h-4 w-3/4" />
|
||||
<Skeleton className="h-4 w-1/2" />
|
||||
</div>
|
||||
) : errorContents ? (
|
||||
<div className="text-destructive text-sm">
|
||||
Error loading content summary: {errorContents.message}
|
||||
</div>
|
||||
) : contents && contents.content.length > 0 ? (
|
||||
<div className="space-y-3">
|
||||
<div className="grid grid-cols-2 gap-4 text-sm">
|
||||
<div>
|
||||
<span className="font-medium text-gray-600 dark:text-gray-400">Content Items:</span>
|
||||
<span className="ml-2">{contents.content.length}</span>
|
||||
</div>
|
||||
<div>
|
||||
<span className="font-medium text-gray-600 dark:text-gray-400">Total Characters:</span>
|
||||
<span className="ml-2">{contents.content.reduce((total, item) => total + item.text.length, 0)}</span>
|
||||
</div>
|
||||
</div>
|
||||
<div className="pt-2">
|
||||
<span className="text-sm font-medium text-gray-600 dark:text-gray-400">Preview:</span>
|
||||
<div className="mt-1 bg-gray-50 dark:bg-gray-800 rounded-md p-3">
|
||||
<p className="text-sm text-gray-900 dark:text-gray-100 line-clamp-3">
|
||||
{contents.content[0]?.text.substring(0, 200)}...
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
) : (
|
||||
<p className="text-gray-500 italic text-sm">
|
||||
No contents found for this file.
|
||||
</p>
|
||||
)}
|
||||
</CardContent>
|
||||
</Card>
|
||||
</>
|
||||
);
|
||||
|
||||
const sidebar = (
|
||||
<PropertiesCard>
|
||||
<PropertyItem label="File ID" value={fileId} />
|
||||
<PropertyItem label="Vector Store ID" value={vectorStoreId} />
|
||||
{file && (
|
||||
<>
|
||||
<PropertyItem label="Status" value={file.status} />
|
||||
<PropertyItem
|
||||
label="Created"
|
||||
value={new Date(file.created_at * 1000).toLocaleString()}
|
||||
/>
|
||||
<PropertyItem label="Usage Bytes" value={file.usage_bytes} />
|
||||
<PropertyItem
|
||||
label="Content Strategy"
|
||||
value={file.chunking_strategy.type}
|
||||
/>
|
||||
</>
|
||||
)}
|
||||
{store && (
|
||||
<>
|
||||
<PropertyItem label="Store Name" value={store.name || ""} />
|
||||
<PropertyItem
|
||||
label="Provider ID"
|
||||
value={(store.metadata.provider_id as string) || ""}
|
||||
/>
|
||||
</>
|
||||
)}
|
||||
</PropertiesCard>
|
||||
);
|
||||
|
||||
return (
|
||||
<>
|
||||
<PageBreadcrumb segments={breadcrumbSegments} />
|
||||
<DetailLayout title={title} mainContent={mainContent} sidebar={sidebar} />
|
||||
</>
|
||||
);
|
||||
}
|
|
@@ -1,16 +1,31 @@
"use client";

import React from "react";
import LogsLayout from "@/components/layout/logs-layout";
import { useParams, usePathname } from "next/navigation";
import {
  PageBreadcrumb,
  BreadcrumbSegment,
} from "@/components/layout/page-breadcrumb";

-export default function VectorStoresLayout({
+export default function VectorStoreDetailLayout({
  children,
}: {
  children: React.ReactNode;
}) {
  const params = useParams();
  const pathname = usePathname();
  const vectorStoreId = params.id as string;

  const breadcrumbSegments: BreadcrumbSegment[] = [
    { label: "Vector Stores", href: "/logs/vector-stores" },
    { label: `Details (${vectorStoreId})` },
  ];

  const isBaseDetailPage = pathname === `/logs/vector-stores/${vectorStoreId}`;

  return (
    <LogsLayout sectionLabel="Vector Stores" basePath="/logs/vector-stores">
      <div className="space-y-4">
        {isBaseDetailPage && <PageBreadcrumb segments={breadcrumbSegments} />}
        {children}
      </div>
    </LogsLayout>
  );
}
@@ -8,6 +8,7 @@ import type {
} from "llama-stack-client/resources/vector-stores/vector-stores";
import { useRouter } from "next/navigation";
import { usePagination } from "@/hooks/use-pagination";
import { Button } from "@/components/ui/button";
import {
  Table,
  TableBody,

@@ -49,6 +50,7 @@ export default function VectorStoresPage() {
    }
  }, [status, hasMore, loadMore]);

  const renderContent = () => {
    if (status === "loading") {
      return (
        <div className="space-y-2">

@@ -98,7 +100,17 @@ export default function VectorStoresPage() {
              onClick={() => router.push(`/logs/vector-stores/${store.id}`)}
              className="cursor-pointer hover:bg-muted/50"
            >
-              <TableCell>{store.id}</TableCell>
+              <TableCell>
+                <Button
+                  variant="link"
+                  className="p-0 h-auto font-mono text-blue-600 hover:text-blue-800 dark:text-blue-400 dark:hover:text-blue-300"
+                  onClick={() =>
+                    router.push(`/logs/vector-stores/${store.id}`)
+                  }
+                >
+                  {store.id}
+                </Button>
+              </TableCell>
              <TableCell>{store.name}</TableCell>
              <TableCell>
                {new Date(store.created_at * 1000).toLocaleString()}

@@ -118,4 +130,12 @@ export default function VectorStoresPage() {
        </Table>
      </div>
    );
  };

  return (
    <div className="space-y-4">
      <h1 className="text-2xl font-semibold">Vector Stores</h1>
      {renderContent()}
    </div>
  );
}
@@ -1,9 +1,11 @@
"use client";

import { useRouter } from "next/navigation";
import type { VectorStore } from "llama-stack-client/resources/vector-stores/vector-stores";
import type { VectorStoreFile } from "llama-stack-client/resources/vector-stores/files";
import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card";
import { Skeleton } from "@/components/ui/skeleton";
import { Button } from "@/components/ui/button";
import {
  DetailLoadingView,
  DetailErrorView,

@@ -42,6 +44,11 @@ export function VectorStoreDetailView({
  id,
}: VectorStoreDetailViewProps) {
  const title = "Vector Store Details";
  const router = useRouter();

  const handleFileClick = (fileId: string) => {
    router.push(`/logs/vector-stores/${id}/files/${fileId}`);
  };

  if (errorStore) {
    return <DetailErrorView title={title} id={id} error={errorStore} />;

@@ -80,7 +87,15 @@ export function VectorStoreDetailView({
              <TableBody>
                {files.map((file) => (
                  <TableRow key={file.id}>
-                    <TableCell>{file.id}</TableCell>
+                    <TableCell>
+                      <Button
+                        variant="link"
+                        className="p-0 h-auto font-mono text-blue-600 hover:text-blue-800 dark:text-blue-400 dark:hover:text-blue-300"
+                        onClick={() => handleFileClick(file.id)}
+                      >
+                        {file.id}
+                      </Button>
+                    </TableCell>
                    <TableCell>{file.status}</TableCell>
                    <TableCell>
                      {new Date(file.created_at * 1000).toLocaleString()}
llama_stack/ui/lib/contents-api.ts (new file, 112 lines)
@@ -0,0 +1,112 @@
import type { FileContentResponse } from "llama-stack-client/resources/vector-stores/files";
import type { LlamaStackClient } from "llama-stack-client";

export type VectorStoreContent = FileContentResponse.Content;
export type VectorStoreContentsResponse = FileContentResponse;

export interface VectorStoreContentItem {
  id: string;
  object: string;
  created_timestamp: number;
  vector_store_id: string;
  file_id: string;
  content: VectorStoreContent;
  metadata: Record<string, any>;
  embedding?: number[];
}

export interface VectorStoreContentDeleteResponse {
  id: string;
  object: string;
  deleted: boolean;
}

export interface VectorStoreListContentsResponse {
  object: string;
  data: VectorStoreContentItem[];
  first_id?: string;
  last_id?: string;
  has_more: boolean;
}

export class ContentsAPI {
  constructor(private client: LlamaStackClient) {}

  async getFileContents(vectorStoreId: string, fileId: string): Promise<VectorStoreContentsResponse> {
    return this.client.vectorStores.files.content(vectorStoreId, fileId);
  }

  async getContent(vectorStoreId: string, fileId: string, contentId: string): Promise<VectorStoreContentItem> {
    const contentsResponse = await this.listContents(vectorStoreId, fileId);
    const targetContent = contentsResponse.data.find(c => c.id === contentId);

    if (!targetContent) {
      throw new Error(`Content ${contentId} not found`);
    }

    return targetContent;
  }

  async updateContent(
    vectorStoreId: string,
    fileId: string,
    contentId: string,
    updates: { content?: string; metadata?: Record<string, any> }
  ): Promise<VectorStoreContentItem> {
    throw new Error("Individual content updates not yet implemented in API");
  }

  async deleteContent(vectorStoreId: string, fileId: string, contentId: string): Promise<VectorStoreContentDeleteResponse> {
    throw new Error("Individual content deletion not yet implemented in API");
  }

  async listContents(
    vectorStoreId: string,
    fileId: string,
    options?: {
      limit?: number;
      order?: string;
      after?: string;
      before?: string;
    }
  ): Promise<VectorStoreListContentsResponse> {
    const fileContents = await this.client.vectorStores.files.content(vectorStoreId, fileId);
    const contentItems: VectorStoreContentItem[] = [];

    fileContents.content.forEach((content, contentIndex) => {
      const rawContent = content as any;

      // Extract actual fields from the API response
      const embedding = rawContent.embedding || undefined;
      const created_timestamp = rawContent.created_timestamp || rawContent.created_at || Date.now() / 1000;
      const chunkMetadata = rawContent.chunk_metadata || {};
      const contentId = rawContent.chunk_metadata?.chunk_id || rawContent.id || `content_${fileId}_${contentIndex}`;
      const objectType = rawContent.object || 'vector_store.file.content';
      contentItems.push({
        id: contentId,
        object: objectType,
        created_timestamp: created_timestamp,
        vector_store_id: vectorStoreId,
        file_id: fileId,
        content: content,
        embedding: embedding,
        metadata: {
          ...chunkMetadata, // chunk_metadata fields from API
          content_length: content.type === 'text' ? content.text.length : 0,
        },
      });
    });

    // apply pagination if needed
    let filteredItems = contentItems;
    if (options?.limit) {
      filteredItems = filteredItems.slice(0, options.limit);
    }

    return {
      object: 'list',
      data: filteredItems,
      has_more: contentItems.length > (options?.limit || contentItems.length),
    };
  }
}
llama_stack/ui/package-lock.json (generated, 10 lines changed)
@@ -18,7 +18,7 @@
        "class-variance-authority": "^0.7.1",
        "clsx": "^2.1.1",
        "framer-motion": "^11.18.2",
-        "llama-stack-client": "0.2.16",
+        "llama-stack-client": "0.2.17",
        "lucide-react": "^0.510.0",
        "next": "15.3.3",
        "next-auth": "^4.24.11",

@@ -9926,10 +9926,10 @@
      "license": "MIT"
    },
    "node_modules/llama-stack-client": {
-      "version": "0.2.16",
-      "resolved": "https://registry.npmjs.org/llama-stack-client/-/llama-stack-client-0.2.16.tgz",
-      "integrity": "sha512-jM7sh1CB5wVumutYb3qfmYJpoTe3IRAa5lm3Us4qO7zVP4tbo3eCE7BOFNWyChpjo9efafUItwogNh28pum9PQ==",
-      "license": "Apache-2.0",
+      "version": "0.2.17",
+      "resolved": "https://registry.npmjs.org/llama-stack-client/-/llama-stack-client-0.2.17.tgz",
+      "integrity": "sha512-+/fEO8M7XPiVLjhH7ge18i1ijKp4+h3dOkE0C8g2cvGuDUtDYIJlf8NSyr9OMByjiWpCibWU7VOKL50LwGLS3Q==",
+      "license": "MIT",
      "dependencies": {
        "@types/node": "^18.11.18",
        "@types/node-fetch": "^2.6.4",