mirror of
https://github.com/meta-llama/llama-stack.git
synced 2025-08-02 08:44:44 +00:00
init
This commit is contained in:
parent
b7a7caa9a8
commit
e65a6fac9d
7 changed files with 135 additions and 0 deletions
7
llama_stack/apis/tools/__init__.py
Normal file
7
llama_stack/apis/tools/__init__.py
Normal file
|
@ -0,0 +1,7 @@
|
|||
# Copyright (c) Meta Platforms, Inc. and affiliates.
|
||||
# All rights reserved.
|
||||
#
|
||||
# This source code is licensed under the terms described in the LICENSE file in
|
||||
# the root directory of this source tree.
|
||||
|
||||
from .tools import * # noqa: F401 F403
|
86
llama_stack/apis/tools/tools.py
Normal file
86
llama_stack/apis/tools/tools.py
Normal file
|
@ -0,0 +1,86 @@
|
|||
# Copyright (c) Meta Platforms, Inc. and affiliates.
|
||||
# All rights reserved.
|
||||
#
|
||||
# This source code is licensed under the terms described in the LICENSE file in
|
||||
# the root directory of this source tree.
|
||||
|
||||
from typing import Any, Dict, List, Literal, Optional
|
||||
|
||||
from llama_models.llama3.api.datatypes import ToolPromptFormat
|
||||
|
||||
from llama_models.schema_utils import json_schema_type
|
||||
from pydantic import BaseModel, Field
|
||||
from typing_extensions import Protocol, runtime_checkable
|
||||
|
||||
from llama_stack.apis.resource import Resource
|
||||
from llama_stack.providers.utils.telemetry.trace_protocol import trace_protocol
|
||||
|
||||
|
||||
@json_schema_type
class ToolParameter(BaseModel):
    """One argument in a tool's function signature.

    Captures the argument's name, its declared type (as a string), a
    human-readable description, and whether the caller must supply it.
    """

    # Argument name as it appears in the tool's signature.
    name: str
    # Declared type of the argument, expressed as a string (e.g. "str").
    type_hint: str
    # Human-readable explanation of what the argument controls.
    description: str
    # Arguments are mandatory unless explicitly marked optional.
    required: bool = True
    # Value used when an optional argument is omitted by the caller.
    default: Optional[Any] = None
|
||||
|
||||
|
||||
@json_schema_type
class ToolReturn(BaseModel):
    """Describes what a tool hands back to its caller.

    Pairs the declared return type (as a string) with a human-readable
    description of the value.
    """

    # Declared return type, expressed as a string (e.g. "str").
    type_hint: str
    # Human-readable explanation of the returned value.
    description: str
|
||||
|
||||
|
||||
@json_schema_type
class Tool(Resource):
    """A callable tool that a provider can expose through the stack.

    Extends `Resource`, so it participates in the shared resource registry;
    `resource_type` is the literal discriminator for that registry.
    """

    # Registry discriminator — always the literal string "tool".
    resource_type: Literal["tool"] = "tool"
    # Tool name presented to callers.
    name: str
    # Human-readable summary of what the tool does.
    description: str
    # Signature of the tool: one entry per accepted argument.
    parameters: List[ToolParameter]
    # Declared return type and description.
    returns: ToolReturn
    # Opaque provider-specific data attached at registration time.
    provider_metadata: Optional[Dict[str, Any]] = None
    # Prompt format defaults to JSON when the registrant does not supply one.
    tool_prompt_format: Optional[ToolPromptFormat] = Field(default=ToolPromptFormat.json)
|
||||
|
||||
|
||||
@runtime_checkable
@trace_protocol
class Tools(Protocol):
    """Registry-facing API for registering and looking up tools.

    Structural (duck-typed) protocol: any object providing these async
    methods satisfies it at runtime thanks to `@runtime_checkable`.
    """

    async def register_tool(
        self,
        tool_id: str,
        name: str,
        description: str,
        parameters: List[ToolParameter],
        returns: ToolReturn,
        provider_metadata: Optional[Dict[str, Any]] = None,
        tool_prompt_format: Optional[ToolPromptFormat] = None,
    ) -> Tool:
        """Register a tool with provider-specific metadata"""
        ...

    async def get_tool(
        self,
        identifier: str,
    ) -> Tool:
        """Fetch a single registered tool by its identifier."""
        ...

    async def list_tools(
        self,
        provider_id: Optional[str] = None,
    ) -> List[Tool]:
        """List tools with optional provider"""
        ...
|
||||
|
||||
|
||||
@runtime_checkable
@trace_protocol
class ToolRuntime(Protocol):
    """Execution-side API for invoking a previously registered tool."""

    # NOTE(review): unlike every method on the `Tools` protocol, this one is
    # synchronous — confirm whether it should be `async def` for consistency.
    def invoke_tool(self, tool_id: str, args: Dict[str, Any]) -> Any:
        """Run a tool with the given arguments"""
        ...
|
|
@ -30,6 +30,7 @@ from llama_stack.apis.scoring import Scoring
|
|||
from llama_stack.apis.scoring_functions import ScoringFunctions
|
||||
from llama_stack.apis.shields import Shields
|
||||
from llama_stack.apis.telemetry import Telemetry
|
||||
from llama_stack.apis.tools import Tools
|
||||
from llama_stack.distribution.client import get_client_impl
|
||||
from llama_stack.distribution.distribution import builtin_automatically_routed_apis
|
||||
from llama_stack.distribution.store import DistributionRegistry
|
||||
|
@ -66,6 +67,7 @@ def api_protocol_map() -> Dict[Api, Any]:
|
|||
def additional_protocols_map() -> Dict[Api, Any]:
|
||||
return {
|
||||
Api.inference: (ModelsProtocolPrivate, Models, Api.models),
|
||||
Api.tools: (ToolsProtocolPrivate, Tools, Api.tools),
|
||||
Api.memory: (MemoryBanksProtocolPrivate, MemoryBanks, Api.memory_banks),
|
||||
Api.safety: (ShieldsProtocolPrivate, Shields, Api.shields),
|
||||
Api.datasetio: (DatasetsProtocolPrivate, Datasets, Api.datasets),
|
||||
|
|
|
@ -17,6 +17,7 @@ from llama_stack.apis.memory_banks.memory_banks import MemoryBank
|
|||
from llama_stack.apis.models import Model
|
||||
from llama_stack.apis.scoring_functions import ScoringFn
|
||||
from llama_stack.apis.shields import Shield
|
||||
from llama_stack.apis.tools import Tool
|
||||
|
||||
|
||||
@json_schema_type
|
||||
|
@ -75,6 +76,10 @@ class EvalTasksProtocolPrivate(Protocol):
|
|||
async def register_eval_task(self, eval_task: EvalTask) -> None: ...
|
||||
|
||||
|
||||
class ToolsProtocolPrivate(Protocol):
    """Provider-internal hook invoked when a tool is registered with the stack.

    Mirrors the other *ProtocolPrivate classes in this file: providers receive
    the fully-constructed `Tool` resource rather than its individual fields.
    """

    async def register_tool(self, tool: Tool) -> None: ...
|
||||
|
||||
|
||||
@json_schema_type
|
||||
class ProviderSpec(BaseModel):
|
||||
api: Api
|
||||
|
|
|
@ -0,0 +1,7 @@
|
|||
# Copyright (c) Meta Platforms, Inc. and affiliates.
|
||||
# All rights reserved.
|
||||
#
|
||||
# This source code is licensed under the terms described in the LICENSE file in
|
||||
# the root directory of this source tree.
|
||||
|
||||
from .meta_reference import * # noqa: F401 F403
|
11
llama_stack/providers/inline/tools/meta_reference/config.py
Normal file
11
llama_stack/providers/inline/tools/meta_reference/config.py
Normal file
|
@ -0,0 +1,11 @@
|
|||
# Copyright (c) Meta Platforms, Inc. and affiliates.
|
||||
# All rights reserved.
|
||||
#
|
||||
# This source code is licensed under the terms described in the LICENSE file in
|
||||
# the root directory of this source tree.
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
|
||||
class MetaReferenceToolConfig(BaseModel):
    """Configuration for the meta-reference tool provider.

    Currently carries no settings; it exists so the provider plumbing has a
    typed config object to instantiate and pass around.
    """
|
|
@ -0,0 +1,17 @@
|
|||
# Copyright (c) Meta Platforms, Inc. and affiliates.
|
||||
# All rights reserved.
|
||||
#
|
||||
# This source code is licensed under the terms described in the LICENSE file in
|
||||
# the root directory of this source tree.
|
||||
|
||||
from llama_stack.apis.tools import Tool, Tools
|
||||
|
||||
from .config import MetaReferenceToolConfig
|
||||
|
||||
|
||||
class MetaReferenceTool(Tools):
    """Inline meta-reference implementation of the tools API (initial stub)."""

    def __init__(self, config: MetaReferenceToolConfig):
        # Provider configuration; currently empty but kept for plumbing.
        self.config = config

    # NOTE(review): this signature matches ToolsProtocolPrivate.register_tool
    # (takes a whole Tool) rather than Tools.register_tool (takes individual
    # fields) — confirm which protocol this class is meant to satisfy.
    async def register_tool(self, tool: Tool):
        pass
|
Loading…
Add table
Add a link
Reference in a new issue