mirror of
https://github.com/meta-llama/llama-stack.git
synced 2025-10-19 15:38:52 +00:00
# What does this PR do? Remove telemetry as a providable API from the codebase. This includes removing it from generated distributions, but also from the provider registry, the router, etc. Since `setup_logger` is tied pretty strictly to `Api.telemetry` being in impls, we still need an "instantiated provider" in our implementations; however, it should not be auto-routed or provided. So in `validate_and_prepare_providers` (called from `resolve_impls`) I made it so that if `run_config.telemetry.enabled` is set, we set up the meta-reference "provider" internally so that `log_event` will work when called. This is the neatest way I think we can remove telemetry from the provider configs while not needing to rip apart the whole "telemetry is a provider" logic just yet — we can do that internally later without disrupting users. Telemetry is therefore removed from the registry, such that if a user puts `telemetry:` as an API in their build/run config it will error out, but it can still be used by us internally as we go through this transition. Relates to #3806. Signed-off-by: Charlie Doern <cdoern@redhat.com>
41 lines
1.4 KiB
Python
# Copyright (c) Meta Platforms, Inc. and affiliates.
|
|
# All rights reserved.
|
|
#
|
|
# This source code is licensed under the terms described in the LICENSE file in
|
|
# the root directory of this source tree.
|
|
|
|
|
|
from llama_stack.providers.datatypes import (
|
|
Api,
|
|
InlineProviderSpec,
|
|
ProviderSpec,
|
|
)
|
|
from llama_stack.providers.utils.kvstore import kvstore_dependencies
|
|
|
|
|
|
def available_providers() -> list[ProviderSpec]:
    """Return the provider specs registered for the agents API.

    Currently a single inline meta-reference agent provider, which
    requires the inference, safety, vector-io, tool-runtime,
    tool-groups, and conversations APIs to be available.
    """
    # Python package requirements for the meta-reference agent provider,
    # plus whatever the kvstore backends need.
    # TODO make this dynamic based on the kvstore config
    agent_pip_packages = [
        "matplotlib",
        "pillow",
        "pandas",
        "scikit-learn",
        "mcp>=1.8.1",
    ]
    agent_pip_packages += kvstore_dependencies()

    meta_reference_agents = InlineProviderSpec(
        api=Api.agents,
        provider_type="inline::meta-reference",
        pip_packages=agent_pip_packages,
        module="llama_stack.providers.inline.agents.meta_reference",
        config_class="llama_stack.providers.inline.agents.meta_reference.MetaReferenceAgentsImplConfig",
        api_dependencies=[
            Api.inference,
            Api.safety,
            Api.vector_io,
            Api.tool_runtime,
            Api.tool_groups,
            Api.conversations,
        ],
        description="Meta's reference implementation of an agent system that can use tools, access vector databases, and perform complex reasoning tasks.",
    )
    return [meta_reference_agents]
|