chore: support default model in moderations API

# What does this PR do?

Adds a `SafetyConfig` (carrying a `default_shield_id`) to the run-config
template machinery (`RunConfigSettings.safety_config`), and wires
`safety: default_shield_id: llama-guard` into the starter distribution
templates, so a distribution can declare a default shield for the
moderations/safety API.

## Test Plan
This commit is contained in:
Eric Huang 2025-10-22 15:13:52 -07:00
parent 7b90e0e9c8
commit 0047a3cdab
23 changed files with 189 additions and 36 deletions

View file

@ -274,3 +274,5 @@ vector_stores:
default_embedding_model:
provider_id: sentence-transformers
model_id: nomic-ai/nomic-embed-text-v1.5
safety:
default_shield_id: llama-guard

View file

@ -277,3 +277,5 @@ vector_stores:
default_embedding_model:
provider_id: sentence-transformers
model_id: nomic-ai/nomic-embed-text-v1.5
safety:
default_shield_id: llama-guard

View file

@ -274,3 +274,5 @@ vector_stores:
default_embedding_model:
provider_id: sentence-transformers
model_id: nomic-ai/nomic-embed-text-v1.5
safety:
default_shield_id: llama-guard

View file

@ -12,6 +12,7 @@ from llama_stack.core.datatypes import (
Provider,
ProviderSpec,
QualifiedModel,
SafetyConfig,
ShieldInput,
ToolGroupInput,
VectorStoresConfig,
@ -256,6 +257,9 @@ def get_distribution_template(name: str = "starter") -> DistributionTemplate:
model_id="nomic-ai/nomic-embed-text-v1.5",
),
),
safety_config=SafetyConfig(
default_shield_id="llama-guard",
),
),
},
run_config_env_vars={

View file

@ -24,6 +24,7 @@ from llama_stack.core.datatypes import (
DistributionSpec,
ModelInput,
Provider,
SafetyConfig,
ShieldInput,
TelemetryConfig,
ToolGroupInput,
@ -188,6 +189,7 @@ class RunConfigSettings(BaseModel):
default_datasets: list[DatasetInput] | None = None
default_benchmarks: list[BenchmarkInput] | None = None
vector_stores_config: VectorStoresConfig | None = None
safety_config: SafetyConfig | None = None
telemetry: TelemetryConfig = Field(default_factory=lambda: TelemetryConfig(enabled=True))
storage_backends: dict[str, Any] | None = None
storage_stores: dict[str, Any] | None = None
@ -290,6 +292,9 @@ class RunConfigSettings(BaseModel):
if self.vector_stores_config:
config["vector_stores"] = self.vector_stores_config.model_dump(exclude_none=True)
if self.safety_config:
config["safety"] = self.safety_config.model_dump(exclude_none=True)
return config