Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-10-07 12:47:37 +00:00)
Make each inference provider into its own subdirectory
parent f64668319c
commit 0de5a807c7

42 changed files with 123 additions and 103 deletions
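At a high level, the refactor moves the reference agentic_system implementation into its own meta_reference subpackage, so absolute import paths shift with it. A minimal before/after sketch of that change, using only module paths that appear in the hunks below:

# Before the refactor: implementation modules sat at the package root.
from llama_toolchain.agentic_system.agentic_system import get_provider_impl
from llama_toolchain.agentic_system.config import AgenticSystemConfig

# After the refactor: the provider implementation lives in its own subdirectory
# and re-exports its entry points from __init__.py (see the new file below).
from llama_toolchain.agentic_system.meta_reference import (
    AgenticSystemConfig,
    get_provider_impl,
)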
@@ -0,0 +1,8 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.

from .agentic_system import get_provider_impl # noqa
from .config import AgenticSystemConfig # noqa
@@ -10,12 +10,24 @@ import uuid
from datetime import datetime
from typing import AsyncGenerator, List, Optional

from llama_toolchain.inference.api import Inference
from llama_toolchain.safety.api import Safety
from llama_toolchain.agentic_system.api.datatypes import (
    AgenticSystemInstanceConfig,
    AgenticSystemTurnResponseEvent,
    AgenticSystemTurnResponseEventType,
    AgenticSystemTurnResponseStepCompletePayload,
    AgenticSystemTurnResponseStepProgressPayload,
    AgenticSystemTurnResponseStepStartPayload,
    AgenticSystemTurnResponseTurnCompletePayload,
    AgenticSystemTurnResponseTurnStartPayload,
    InferenceStep,
    Session,
    ShieldCallStep,
    StepType,
    ToolExecutionStep,
    Turn,
)

from .api.endpoints import * # noqa

from llama_toolchain.inference.api import ChatCompletionRequest
from llama_toolchain.inference.api import ChatCompletionRequest, Inference

from llama_toolchain.inference.api.datatypes import (
    Attachment,
@@ -33,36 +45,16 @@ from llama_toolchain.inference.api.datatypes import (
    ToolResponseMessage,
    URL,
)
from llama_toolchain.safety.api import Safety
from llama_toolchain.safety.api.datatypes import (
    BuiltinShield,
    ShieldDefinition,
    ShieldResponse,
)

from termcolor import cprint
from llama_toolchain.agentic_system.api.endpoints import * # noqa

from .api.datatypes import (
    AgenticSystemInstanceConfig,
    AgenticSystemTurnResponseEvent,
    AgenticSystemTurnResponseEventType,
    AgenticSystemTurnResponseStepCompletePayload,
    AgenticSystemTurnResponseStepProgressPayload,
    AgenticSystemTurnResponseStepStartPayload,
    AgenticSystemTurnResponseTurnCompletePayload,
    AgenticSystemTurnResponseTurnStartPayload,
    InferenceStep,
    Session,
    ShieldCallStep,
    StepType,
    ToolExecutionStep,
    Turn,
)
from .api.endpoints import (
    AgenticSystemTurnCreateRequest,
    AgenticSystemTurnResponseStreamChunk,
)
from .safety import SafetyException, ShieldRunnerMixin

from .system_prompt import get_agentic_prefix_messages
from .tools.base import BaseTool
from .tools.builtin import SingleMessageBuiltinTool
@@ -5,25 +5,18 @@
# the root directory of this source tree.


from llama_toolchain.agentic_system.api import AgenticSystem

from llama_toolchain.distribution.datatypes import Api, ProviderSpec
from llama_toolchain.inference.api import Inference
from llama_toolchain.safety.api import Safety

from .config import AgenticSystemConfig
from .api.endpoints import * # noqa

import logging
import os
import uuid
from typing import AsyncGenerator, Dict

from llama_toolchain.distribution.datatypes import Api, ProviderSpec
from llama_toolchain.inference.api import Inference
from llama_toolchain.inference.api.datatypes import BuiltinTool

from .agent_instance import AgentInstance

from .api.endpoints import (
from llama_toolchain.safety.api import Safety
from llama_toolchain.agentic_system.api.endpoints import * # noqa
from llama_toolchain.agentic_system.api import (
    AgenticSystem,
    AgenticSystemCreateRequest,
    AgenticSystemCreateResponse,
    AgenticSystemSessionCreateRequest,
@@ -31,6 +24,10 @@ from .api.endpoints import (
    AgenticSystemTurnCreateRequest,
)

from .agent_instance import AgentInstance

from .config import AgenticSystemConfig

from .tools.builtin import (
    BraveSearchTool,
    CodeInterpreterTool,
@@ -0,0 +1,5 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.
@@ -6,7 +6,7 @@

from typing import List

from llama_toolchain.agentic_system.safety import ShieldRunnerMixin
from llama_toolchain.agentic_system.meta_reference.safety import ShieldRunnerMixin

from llama_toolchain.inference.api import Message
from llama_toolchain.safety.api.datatypes import ShieldDefinition
@@ -19,8 +19,8 @@ def available_agentic_system_providers() -> List[ProviderSpec]:
                "torch",
                "transformers",
            ],
            module="llama_toolchain.agentic_system.agentic_system",
            config_class="llama_toolchain.agentic_system.config.AgenticSystemConfig",
            module="llama_toolchain.agentic_system.meta_reference",
            config_class="llama_toolchain.agentic_system.meta_reference.AgenticSystemConfig",
            api_dependencies=[
                Api.inference,
                Api.safety,
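The module / config_class strings above are what the distribution layer uses to locate the provider now that it lives under meta_reference. Below is a minimal sketch of how such a pair could be resolved, assuming the provider package exposes get_provider_impl the way the new __init__.py above does; the resolver name, its signature, and the get_provider_impl call shape are assumptions for illustration, not code from this repository.

import importlib

def resolve_provider(module: str, config_class: str, config_data: dict, deps: dict):
    # Import the provider package, e.g. llama_toolchain.agentic_system.meta_reference.
    impl_module = importlib.import_module(module)
    # Split "pkg.module.ClassName" into its module path and class name.
    cls_module, _, cls_name = config_class.rpartition(".")
    config_cls = getattr(importlib.import_module(cls_module), cls_name)
    config = config_cls(**config_data)
    # Assumed entry point; the real call may be async or take different arguments.
    return impl_module.get_provider_impl(config, deps)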
llama_toolchain/agentic_system/tools/custom/__init__.py (new file, 5 lines added)
@@ -0,0 +1,5 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.
@@ -12,7 +12,10 @@ from typing import Dict, List
from llama_models.llama3_1.api.datatypes import * # noqa: F403
from llama_toolchain.agentic_system.api import * # noqa: F403

from .builtin import interpret_content_as_attachment
# TODO: this is symptomatic of us needing to pull more tooling related utilities
from llama_toolchain.agentic_system.meta_reference.tools.builtin import (
    interpret_content_as_attachment,
)


class CustomTool:
@@ -17,10 +17,15 @@ from llama_toolchain.agentic_system.api import (
)
from llama_toolchain.agentic_system.client import AgenticSystemClient

from llama_toolchain.agentic_system.tools.execute import execute_with_custom_tools
from llama_toolchain.agentic_system.tools.custom.execute import (
    execute_with_custom_tools,
)
from llama_toolchain.safety.api.datatypes import BuiltinShield, ShieldDefinition


# TODO: this should move back to the llama-agentic-system repo


class AgenticSystemClientWrapper:

    def __init__(self, api, system_id, custom_tools):
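For downstream code, the practical effect of the hunk above is just an import-path change; a minimal before/after sketch using only names visible in this diff:

# Old path (removed in this commit):
# from llama_toolchain.agentic_system.tools.execute import execute_with_custom_tools

# New path, now under the custom tools subpackage:
from llama_toolchain.agentic_system.tools.custom.execute import (
    execute_with_custom_tools,
)

This keeps the user-facing custom-tool helpers under tools/custom while the provider-specific builtins move under meta_reference/tools, matching the commit's theme of giving each provider implementation its own subdirectory.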