Mirror of https://github.com/meta-llama/llama-stack.git, synced 2025-10-08 13:00:52 +00:00
# What does this PR do?

There is a lot of code in the agents API that uses the telemetry API and its helpers without checking whether that API is even enabled. Agents is the only API besides inference that actively uses telemetry code, so after this change telemetry can be optional for the entire stack.

Resolves #3665

## Test Plan

Existing agent tests.

Signed-off-by: Charlie Doern <cdoern@redhat.com>
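Conceptually, the change boils down to guarding telemetry use so the agents implementation still works when no telemetry provider is configured. The sketch below illustrates only that guard pattern; the `TelemetryLike` protocol, the `log_event` signature, and the `AgentTurnRunner` class are illustrative assumptions, not llama-stack's actual interfaces.

```python
# Minimal sketch of the guard pattern described in this PR (not the actual
# meta-reference implementation): telemetry calls are skipped entirely when the
# optional Telemetry API was never wired in.
from typing import Any, Protocol


class TelemetryLike(Protocol):
    # Hypothetical stand-in for the telemetry interface; names are assumptions.
    async def log_event(self, event: dict[str, Any]) -> None: ...


class AgentTurnRunner:
    def __init__(self, telemetry_api: TelemetryLike | None = None) -> None:
        # telemetry_api is None when the Telemetry API is not enabled in the stack.
        self.telemetry_api = telemetry_api

    async def run_turn(self, prompt: str) -> str:
        result = f"echo: {prompt}"  # stand-in for real inference / tool execution
        if self.telemetry_api is not None:  # only log when telemetry is configured
            await self.telemetry_api.log_event({"type": "agent_turn", "prompt": prompt})
        return result
```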
44 lines
1.5 KiB
Python
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.

from llama_stack.providers.datatypes import (
    Api,
    InlineProviderSpec,
    ProviderSpec,
)
from llama_stack.providers.utils.kvstore import kvstore_dependencies


def available_providers() -> list[ProviderSpec]:
    return [
        InlineProviderSpec(
            api=Api.agents,
            provider_type="inline::meta-reference",
            pip_packages=[
                "matplotlib",
                "pillow",
                "pandas",
                "scikit-learn",
                "mcp>=1.8.1",
            ]
            + kvstore_dependencies(),  # TODO make this dynamic based on the kvstore config
            module="llama_stack.providers.inline.agents.meta_reference",
            config_class="llama_stack.providers.inline.agents.meta_reference.MetaReferenceAgentsImplConfig",
            api_dependencies=[
                Api.inference,
                Api.safety,
                Api.vector_io,
                Api.vector_dbs,
                Api.tool_runtime,
                Api.tool_groups,
            ],
            optional_api_dependencies=[
                Api.telemetry,
            ],
            description="Meta's reference implementation of an agent system that can use tools, access vector databases, and perform complex reasoning tasks.",
        ),
    ]
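For context on how the `optional_api_dependencies` entry above surfaces at wiring time, here is a hedged sketch of a provider entry point reading `Api.telemetry` out of its resolved dependencies. The `get_provider_impl` name and the `deps` dict follow the usual llama-stack provider convention, but treat the exact signature and return value as assumptions; the point is only that an optional API may be absent and should be looked up with `.get()` rather than indexed.

```python
from typing import Any

from llama_stack.providers.datatypes import Api


async def get_provider_impl(config: Any, deps: dict[Api, Any]) -> Any:
    # Required dependencies (Api.inference, Api.safety, ...) can be indexed
    # directly; the optional telemetry API may be missing entirely, so use .get().
    telemetry_api = deps.get(Api.telemetry)  # None when telemetry is not enabled
    inference_api = deps[Api.inference]
    # ...construct the agents implementation here, passing telemetry_api through
    # so call sites can skip telemetry work when it is None (see sketch above).
    return {"inference": inference_api, "telemetry": telemetry_api}  # placeholder
```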