mirror of
https://github.com/meta-llama/llama-stack.git
synced 2025-12-11 19:56:03 +00:00
feat(api): add file_processing to Api enumeration and include FileProcessors in LlamaStack
This commit introduces a new enumeration value, `file_processing`, to the `Api` class, enhancing the API's capabilities for file management. Additionally, the `FileProcessors` class is now included in the `LlamaStack` class, allowing for better integration of file processing functionalities within the stack. No functional changes were made to existing features.
This commit is contained in:
parent
e6b27db30a
commit
21765e99a0
3 changed files with 54 additions and 1 deletions
|
|
@@ -82,6 +82,7 @@ class DynamicApiMeta(EnumMeta):
|
||||||
@json_schema_type
|
@json_schema_type
|
||||||
class Api(Enum, metaclass=DynamicApiMeta):
|
class Api(Enum, metaclass=DynamicApiMeta):
|
||||||
"""Enumeration of all available APIs in the Llama Stack system.
|
"""Enumeration of all available APIs in the Llama Stack system.
|
||||||
|
:cvar file_processing: File processing and management
|
||||||
:cvar providers: Provider management and configuration
|
:cvar providers: Provider management and configuration
|
||||||
:cvar inference: Text generation, chat completions, and embeddings
|
:cvar inference: Text generation, chat completions, and embeddings
|
||||||
:cvar safety: Content moderation and safety shields
|
:cvar safety: Content moderation and safety shields
|
||||||
|
|
@@ -104,7 +105,7 @@ class Api(Enum, metaclass=DynamicApiMeta):
|
||||||
:cvar prompts: Prompt versions and management
|
:cvar prompts: Prompt versions and management
|
||||||
:cvar inspect: Built-in system inspection and introspection
|
:cvar inspect: Built-in system inspection and introspection
|
||||||
"""
|
"""
|
||||||
|
file_processing = "file_processing"
|
||||||
providers = "providers"
|
providers = "providers"
|
||||||
inference = "inference"
|
inference = "inference"
|
||||||
safety = "safety"
|
safety = "safety"
|
||||||
|
|
|
||||||
50
src/llama_stack/apis/file_processors/file_processors.py
Normal file
50
src/llama_stack/apis/file_processors/file_processors.py
Normal file
|
|
@@ -0,0 +1,50 @@
|
||||||
|
# Copyright (c) Meta Platforms, Inc. and affiliates.
|
||||||
|
# All rights reserved.
|
||||||
|
#
|
||||||
|
# This source code is licensed under the terms described in the LICENSE file in
|
||||||
|
# the root directory of this source tree.
|
||||||
|
|
||||||
|
from typing import Any, Protocol, runtime_checkable
|
||||||
|
|
||||||
|
from pydantic import BaseModel, Field
|
||||||
|
|
||||||
|
from llama_stack.apis.version import LLAMA_STACK_API_V1
|
||||||
|
from llama_stack.core.telemetry.trace_protocol import trace_protocol
|
||||||
|
from llama_stack.schema_utils import json_schema_type, webmethod
|
||||||
|
|
||||||
|
|
||||||
|
@json_schema_type
class ProcessedContent(BaseModel):
    """Result of file processing containing extracted content and metadata.

    Pairs the text pulled out of a document with free-form metadata
    describing how the extraction was performed.

    :param content: Extracted text content from the file
    :param metadata: Processing metadata including processor info, timing, etc.
    """

    # Required field: the extracted text itself.
    content: str = Field(..., description="Extracted text content from file")
    # Defaults to an empty dict via default_factory (never a shared mutable default).
    metadata: dict[str, Any] = Field(default_factory=dict, description="Processing metadata")
|
||||||
|
|
||||||
|
|
||||||
|
@runtime_checkable
@trace_protocol
class FileProcessors(Protocol):
    """File Processors

    This API provides document processing capabilities for extracting text content
    from various file formats including PDFs, Word documents, and more.
    """

    # Exposed at POST /file-processors/process under the v1 API surface.
    @webmethod(route="/file-processors/process", method="POST", level=LLAMA_STACK_API_V1)
    async def process_file(
        self,
        file_data: bytes,
        filename: str,
        options: dict[str, Any] | None = None,
    ) -> ProcessedContent:
        """Process a file and return extracted text content.

        :param file_data: The raw file data as bytes
        :param filename: Name of the file (used for format detection)
        :param options: Optional processing options (processor-specific)
        :returns: ProcessedContent with extracted text and metadata
        """
        ...
||||||
|
|
@@ -55,6 +55,7 @@ from llama_stack.core.store.registry import create_dist_registry
|
||||||
from llama_stack.core.utils.dynamic import instantiate_class_type
|
from llama_stack.core.utils.dynamic import instantiate_class_type
|
||||||
from llama_stack.log import get_logger
|
from llama_stack.log import get_logger
|
||||||
from llama_stack.providers.datatypes import Api
|
from llama_stack.providers.datatypes import Api
|
||||||
|
from llama_stack.apis.file_processors import FileProcessors
|
||||||
|
|
||||||
logger = get_logger(name=__name__, category="core")
|
logger = get_logger(name=__name__, category="core")
|
||||||
|
|
||||||
|
|
@@ -82,6 +83,7 @@ class LlamaStack(
|
||||||
Files,
|
Files,
|
||||||
Prompts,
|
Prompts,
|
||||||
Conversations,
|
Conversations,
|
||||||
|
FileProcessors,
|
||||||
):
|
):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
|
||||||
Loading…
Add table
Add a link
Reference in a new issue