Xi Yan 2025-03-18 21:52:00 -07:00
parent 8a576d7d72
commit b3060ce8a1
4 changed files with 7 additions and 13 deletions

Changed file 1 of 4:

@@ -14,7 +14,6 @@ from .routing_tables import (
     BenchmarksRoutingTable,
     DatasetsRoutingTable,
     ModelsRoutingTable,
-    ScoringFunctionsRoutingTable,
     ShieldsRoutingTable,
     ToolGroupsRoutingTable,
     VectorDBsRoutingTable,
@@ -47,10 +46,8 @@ async def get_routing_table_impl(
 async def get_auto_router_impl(api: Api, routing_table: RoutingTable, deps: Dict[str, Any]) -> Any:
     from .routers import (
         DatasetIORouter,
-        EvalRouter,
         InferenceRouter,
         SafetyRouter,
-        ScoringRouter,
         ToolRuntimeRouter,
         VectorIORouter,
     )
@@ -60,8 +57,6 @@ async def get_auto_router_impl(api: Api, routing_table: RoutingTable, deps: Dict
         "inference": InferenceRouter,
         "safety": SafetyRouter,
         "datasetio": DatasetIORouter,
-        "scoring": ScoringRouter,
-        "eval": EvalRouter,
         "tool_runtime": ToolRuntimeRouter,
     }
     api_to_deps = {
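
Net effect of this file: the scoring and eval APIs are no longer wired into the auto-router registry, so get_auto_router_impl can only construct routers for the remaining APIs. A minimal, self-contained sketch of that registry-lookup pattern; the class, function name, and error handling below are illustrative stand-ins, not the library's actual code:

    from typing import Any, Dict, Type

    class InferenceRouter:
        # stand-in for the real router classes imported above
        def __init__(self, routing_table: Any) -> None:
            self.routing_table = routing_table

    API_TO_ROUTERS: Dict[str, Type] = {
        "inference": InferenceRouter,
        # "scoring" and "eval" entries are gone after this commit
    }

    def get_auto_router(api: str, routing_table: Any) -> Any:
        # look up the router class for the requested API and instantiate it;
        # unknown APIs (now including "scoring" and "eval") fail loudly
        router_cls = API_TO_ROUTERS.get(api)
        if router_cls is None:
            raise ValueError(f"no auto-router registered for API {api!r}")
        return router_cls(routing_table)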

Changed file 2 of 4:

@@ -20,7 +20,6 @@ async def get_provider_impl(
         config,
         deps[Api.datasetio],
         deps[Api.datasets],
-        deps[Api.scoring],
         deps[Api.inference],
         deps[Api.agents],
     )
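
These dependencies are passed positionally into the eval provider's implementation, so removing deps[Api.scoring] from the call only works if the implementation's constructor drops the scoring parameter in the same position; otherwise every later argument (inference, agents) would bind to the wrong name. A hedged sketch of the constructor shape implied by the new call site; the class and parameter names are assumptions, not the real signature:

    class MetaReferenceEvalSketch:
        # hypothetical constructor mirroring the call order after this change:
        # (config, datasetio, datasets, inference, agents) -- no scoring dependency
        def __init__(self, config, datasetio_api, datasets_api, inference_api, agents_api):
            self.config = config
            self.datasetio_api = datasetio_api
            self.datasets_api = datasets_api
            self.inference_api = inference_api
            self.agents_api = agents_api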

Changed file 3 of 4:

@@ -5,14 +5,12 @@
 # the root directory of this source tree.

 from enum import Enum
-from typing import Any, Dict, List

 from llama_stack.apis.common.type_system import (
     ChatCompletionInputType,
     CompletionInputType,
     StringType,
 )
-from llama_stack.distribution.datatypes import Api


 class ColumnName(Enum):

Changed file 4 of 4:

@@ -30,12 +30,14 @@ from llama_stack.providers.remote.vector_io.pgvector.config import (
 from llama_stack.providers.utils.inference.model_registry import ProviderModelEntry
 from llama_stack.templates.template import (
     DistributionTemplate,
-    RunConfigSettings,
     get_model_registry,
+    RunConfigSettings,
 )


-def get_inference_providers() -> Tuple[List[Provider], Dict[str, List[ProviderModelEntry]]]:
+def get_inference_providers() -> (
+    Tuple[List[Provider], Dict[str, List[ProviderModelEntry]]]
+):
     # in this template, we allow each API key to be optional
     providers = [
         (
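
The signature itself is unchanged here; the long return annotation is only re-wrapped in parentheses so each physical line fits a shorter line-length limit (which formatter produced this is not stated in the commit). A small standalone example of the same wrapping, to show that the parentheses are purely syntactic:

    from typing import Dict, List, Tuple

    # wrapping a long return annotation in parentheses lets it span several
    # physical lines without changing the function's type or behavior
    def make_index() -> (
        Tuple[List[str], Dict[str, List[int]]]
    ):
        names = ["alpha", "beta"]
        return names, {name: [len(name)] for name in names}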
@@ -116,7 +118,9 @@ def get_distribution_template() -> DistributionTemplate:
         Provider(
             provider_id="sqlite-vec",
             provider_type="inline::sqlite-vec",
-            config=SQLiteVectorIOConfig.sample_run_config(f"~/.llama/distributions/{name}"),
+            config=SQLiteVectorIOConfig.sample_run_config(
+                f"~/.llama/distributions/{name}"
+            ),
         ),
         Provider(
             provider_id="${env.ENABLE_CHROMADB+chromadb}",
@@ -208,7 +212,6 @@ def get_distribution_template() -> DistributionTemplate:
     ]

     # TODO(xiyan): fix this back as registerable resources
-    default_benchmarks = []
     # default_benchmarks = [
     #     BenchmarkInput(
     #         benchmark_id="meta-reference-simpleqa",
@@ -255,7 +258,6 @@ def get_distribution_template() -> DistributionTemplate:
                 default_tool_groups=default_tool_groups,
                 default_shields=[ShieldInput(shield_id="meta-llama/Llama-Guard-3-8B")],
                 default_datasets=default_datasets,
-                default_benchmarks=default_benchmarks,
             ),
         },
         run_config_env_vars={
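
Taken together, the last two hunks stop the template from registering any default benchmarks: the empty default_benchmarks list is deleted and the keyword is no longer passed to RunConfigSettings, while the commented-out BenchmarkInput block stays in place behind the TODO. A toy sketch of the resulting call shape, using only the keyword names visible in this diff; the real RunConfigSettings has more fields and different value types:

    from dataclasses import dataclass, field
    from typing import List, Optional

    @dataclass
    class RunConfigSettingsSketch:
        # toy stand-in limited to the keywords seen in this diff
        default_tool_groups: List[str] = field(default_factory=list)
        default_shields: List[str] = field(default_factory=list)
        default_datasets: List[str] = field(default_factory=list)
        default_benchmarks: Optional[List[str]] = None  # no longer passed by the template

    settings = RunConfigSettingsSketch(
        default_tool_groups=["example-tool-group"],
        default_shields=["meta-llama/Llama-Guard-3-8B"],
        default_datasets=[],
        # default_benchmarks omitted: no benchmarks are registered by default
        # until the TODO above is resolved
    )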