diff --git a/llama_stack/providers/remote/inference/gemini/models.py b/llama_stack/providers/remote/inference/gemini/models.py
index 6fda35e0f..bd696b0ac 100644
--- a/llama_stack/providers/remote/inference/gemini/models.py
+++ b/llama_stack/providers/remote/inference/gemini/models.py
@@ -13,7 +13,9 @@ LLM_MODEL_IDS = [
     "gemini-1.5-flash",
     "gemini-1.5-pro",
     "gemini-2.0-flash",
+    "gemini-2.0-flash-lite",
     "gemini-2.5-flash",
+    "gemini-2.5-flash-lite",
     "gemini-2.5-pro",
 ]
 
diff --git a/llama_stack/providers/utils/telemetry/tracing.py b/llama_stack/providers/utils/telemetry/tracing.py
index 75b29cdce..7080e774a 100644
--- a/llama_stack/providers/utils/telemetry/tracing.py
+++ b/llama_stack/providers/utils/telemetry/tracing.py
@@ -9,7 +9,9 @@ import contextvars
 import logging
 import queue
 import random
+import sys
 import threading
+import time
 from collections.abc import Callable
 from datetime import UTC, datetime
 from functools import wraps
@@ -30,6 +32,16 @@ from llama_stack.providers.utils.telemetry.trace_protocol import serialize_value
 
 logger = get_logger(__name__, category="core")
 
+# Fallback logger that does NOT propagate to TelemetryHandler to avoid recursion
+_fallback_logger = logging.getLogger("llama_stack.telemetry.background")
+if not _fallback_logger.handlers:
+    _fallback_logger.propagate = False
+    _fallback_logger.setLevel(logging.ERROR)
+    _fallback_handler = logging.StreamHandler(sys.stderr)
+    _fallback_handler.setLevel(logging.ERROR)
+    _fallback_handler.setFormatter(logging.Formatter("%(asctime)s [%(levelname)s] %(name)s: %(message)s"))
+    _fallback_logger.addHandler(_fallback_handler)
+
 INVALID_SPAN_ID = 0x0000000000000000
 INVALID_TRACE_ID = 0x00000000000000000000000000000000
 
@@ -79,19 +91,32 @@ def generate_trace_id() -> str:
 CURRENT_TRACE_CONTEXT = contextvars.ContextVar("trace_context", default=None)
 BACKGROUND_LOGGER = None
 
+LOG_QUEUE_FULL_LOG_INTERVAL_SECONDS = 60.0
+
 
 class BackgroundLogger:
     def __init__(self, api: Telemetry, capacity: int = 100000):
         self.api = api
-        self.log_queue = queue.Queue(maxsize=capacity)
+        self.log_queue: queue.Queue[Any] = queue.Queue(maxsize=capacity)
         self.worker_thread = threading.Thread(target=self._process_logs, daemon=True)
         self.worker_thread.start()
+        self._last_queue_full_log_time: float = 0.0
+        self._dropped_since_last_notice: int = 0
 
     def log_event(self, event):
         try:
             self.log_queue.put_nowait(event)
         except queue.Full:
-            logger.error("Log queue is full, dropping event")
+            # Aggregate drops and emit at most once per interval via fallback logger
+            self._dropped_since_last_notice += 1
+            current_time = time.time()
+            if current_time - self._last_queue_full_log_time >= LOG_QUEUE_FULL_LOG_INTERVAL_SECONDS:
+                _fallback_logger.error(
+                    "Log queue is full; dropped %d events since last notice",
+                    self._dropped_since_last_notice,
+                )
+                self._last_queue_full_log_time = current_time
+                self._dropped_since_last_notice = 0
 
     def _process_logs(self):
         while True:
diff --git a/llama_stack/ui/app/logs/vector-stores/[id]/files/[fileId]/contents/[contentId]/page.tsx b/llama_stack/ui/app/logs/vector-stores/[id]/files/[fileId]/contents/[contentId]/page.tsx
new file mode 100644
index 000000000..6896b992a
--- /dev/null
+++ b/llama_stack/ui/app/logs/vector-stores/[id]/files/[fileId]/contents/[contentId]/page.tsx
@@ -0,0 +1,383 @@
+"use client";
+
+import { useEffect, useState } from "react";
+import { useParams, useRouter } from "next/navigation";
+import { useAuthClient } from "@/hooks/use-auth-client";
+import { ContentsAPI, VectorStoreContentItem } from "@/lib/contents-api";
+import type { VectorStore } from "llama-stack-client/resources/vector-stores/vector-stores";
+import type { VectorStoreFile } from "llama-stack-client/resources/vector-stores/files";
+import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card";
+import { Button } from "@/components/ui/button";
+import { Input } from "@/components/ui/input";
+import { Edit, Save, X, Trash2 } from "lucide-react";
+import {
+  DetailLoadingView,
+  DetailErrorView,
+  DetailNotFoundView,
+  DetailLayout,
+  PropertiesCard,
+  PropertyItem,
+} from "@/components/layout/detail-layout";
+import { PageBreadcrumb, BreadcrumbSegment } from "@/components/layout/page-breadcrumb";
+
+export default function ContentDetailPage() {
+  const params = useParams();
+  const router = useRouter();
+  const vectorStoreId = params.id as string;
+  const fileId = params.fileId as string;
+  const contentId = params.contentId as string;
+  const client = useAuthClient();
+
+  const getTextFromContent = (content: any): string => {
+    if (typeof content === 'string') {
+      return content;
+    } else if (content && content.type === 'text') {
+      return content.text;
+    }
+    return '';
+  };
+
+  const [store, setStore] = useState<VectorStore | null>(null);
+  const [file, setFile] = useState<VectorStoreFile | null>(null);
+  const [content, setContent] = useState<VectorStoreContentItem | null>(null);
+  const [isLoading, setIsLoading] = useState(true);
+  const [error, setError] = useState<Error | null>(null);
+  const [isEditing, setIsEditing] = useState(false);
+  const [editedContent, setEditedContent] = useState("");
+  const [editedMetadata, setEditedMetadata] = useState<Record<string, any>>({});
+  const [isEditingEmbedding, setIsEditingEmbedding] = useState(false);
+  const [editedEmbedding, setEditedEmbedding] = useState<number[]>([]);
+
+  useEffect(() => {
+    if (!vectorStoreId || !fileId || !contentId) return;
+
+    const fetchData = async () => {
+      setIsLoading(true);
+      setError(null);
+      try {
+        const [storeResponse, fileResponse] = await Promise.all([
+          client.vectorStores.retrieve(vectorStoreId),
+          client.vectorStores.files.retrieve(vectorStoreId, fileId),
+        ]);
+
+        setStore(storeResponse as VectorStore);
+        setFile(fileResponse as VectorStoreFile);
+
+        const contentsAPI = new ContentsAPI(client);
+        const contentsResponse = await contentsAPI.listContents(vectorStoreId, fileId);
+        const targetContent = contentsResponse.data.find(c => c.id === contentId);
+
+        if (targetContent) {
+          setContent(targetContent);
+          setEditedContent(getTextFromContent(targetContent.content));
+          setEditedMetadata({ ...targetContent.metadata });
+          setEditedEmbedding(targetContent.embedding || []);
+        } else {
+          throw new Error(`Content ${contentId} not found`);
+        }
+      } catch (err) {
+        setError(err instanceof Error ? err : new Error("Failed to load content."));
+      } finally {
+        setIsLoading(false);
+      }
+    };
+    fetchData();
+  }, [vectorStoreId, fileId, contentId, client]);
+
+  const handleSave = async () => {
+    if (!content) return;
+
+    try {
+      const updates: { content?: string; metadata?: Record<string, any> } = {};
+
+      if (editedContent !== getTextFromContent(content.content)) {
+        updates.content = editedContent;
+      }
+
+      if (JSON.stringify(editedMetadata) !== JSON.stringify(content.metadata)) {
+        updates.metadata = editedMetadata;
+      }
+
+      if (Object.keys(updates).length > 0) {
+        const contentsAPI = new ContentsAPI(client);
+        const updatedContent = await contentsAPI.updateContent(vectorStoreId, fileId, contentId, updates);
+        setContent(updatedContent);
+      }
+
+      setIsEditing(false);
+    } catch (err) {
+      console.error('Failed to update content:', err);
+    }
+  };
+
+  const handleDelete = async () => {
+    if (!confirm('Are you sure you want to delete this content?')) return;
+
+    try {
+      const contentsAPI = new ContentsAPI(client);
+      await contentsAPI.deleteContent(vectorStoreId, fileId, contentId);
+      router.push(`/logs/vector-stores/${vectorStoreId}/files/${fileId}/contents`);
+    } catch (err) {
+      console.error('Failed to delete content:', err);
+    }
+  };
+
+  const handleCancel = () => {
+    setEditedContent(content ? getTextFromContent(content.content) : "");
+    setEditedMetadata({ ...content?.metadata });
+    setEditedEmbedding(content?.embedding || []);
+    setIsEditing(false);
+    setIsEditingEmbedding(false);
+  };
+
+  const title = `Content: ${contentId}`;
+
+  const breadcrumbSegments: BreadcrumbSegment[] = [
+    { label: "Vector Stores", href: "/logs/vector-stores" },
+    { label: store?.name || vectorStoreId, href: `/logs/vector-stores/${vectorStoreId}` },
+    { label: "Files", href: `/logs/vector-stores/${vectorStoreId}` },
+    { label: fileId, href: `/logs/vector-stores/${vectorStoreId}/files/${fileId}` },
+    { label: "Contents", href: `/logs/vector-stores/${vectorStoreId}/files/${fileId}/contents` },
+    { label: contentId },
+  ];
+
+  if (error) {
+    return <DetailErrorView title={title} id={contentId} error={error} />;
+  }
+  if (isLoading) {
+    return <DetailLoadingView title={title} />;
+  }
+  if (!content) {
+    return <DetailNotFoundView title={title} id={contentId} />;
+  }
+
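+  // Main "Content" card: toggles between a read-only view of the chunk and an edit mode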
+  const mainContent = (
+    <>
+      <Card>
+        <CardHeader className="flex flex-row items-center justify-between">
+          <CardTitle>Content</CardTitle>
+          <div className="flex gap-2">
+            {isEditing ? (
+              <>
+                <Button onClick={handleSave}><Save /></Button>
+                <Button onClick={handleCancel}><X /></Button>
+              </>
+            ) : (
+              <>
+                <Button onClick={() => setIsEditing(true)}><Edit /></Button>
+                <Button onClick={handleDelete}><Trash2 /></Button>
+              </>
+            )}
+          </div>
+        </CardHeader>
+        <CardContent>
+          {isEditing ? (
+            <
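
For context on the tracing.py hunk above: instead of logging an error for every dropped event, BackgroundLogger now counts drops and reports them through a non-propagating stderr logger at most once per LOG_QUEUE_FULL_LOG_INTERVAL_SECONDS. The standalone sketch below illustrates that throttling pattern in isolation; the names here (DropNotifier, NOTICE_INTERVAL_SECONDS) are hypothetical and not code from this patch.

```python
import logging
import sys
import time

NOTICE_INTERVAL_SECONDS = 60.0  # mirrors LOG_QUEUE_FULL_LOG_INTERVAL_SECONDS in the patch

# A non-propagating logger, so emitting the notice cannot re-enter a telemetry handler.
notice_logger = logging.getLogger("example.drop_notices")
notice_logger.propagate = False
notice_logger.setLevel(logging.ERROR)
notice_logger.addHandler(logging.StreamHandler(sys.stderr))


class DropNotifier:
    """Counts dropped events and reports a summary at most once per interval."""

    def __init__(self) -> None:
        self._last_notice_time = 0.0
        self._dropped_since_last_notice = 0

    def record_drop(self) -> None:
        self._dropped_since_last_notice += 1
        now = time.time()
        if now - self._last_notice_time >= NOTICE_INTERVAL_SECONDS:
            notice_logger.error(
                "Queue is full; dropped %d events since last notice",
                self._dropped_since_last_notice,
            )
            self._last_notice_time = now
            self._dropped_since_last_notice = 0


if __name__ == "__main__":
    notifier = DropNotifier()
    for _ in range(1000):
        notifier.record_drop()  # only the first drop in each interval produces a log line
```

Because the last-notice timestamp starts at 0.0, the first drop is reported immediately; later drops within the same interval are only counted and appear in the next summary.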