chore: support default model in moderations API

# What does this PR do?


## Test Plan
This commit is contained in:
Eric Huang 2025-10-22 15:13:20 -07:00
parent 7b90e0e9c8
commit 4f7fedd91d
23 changed files with 189 additions and 36 deletions

View file

@@ -24,6 +24,7 @@ from llama_stack.core.datatypes import (
DistributionSpec,
ModelInput,
Provider,
SafetyConfig,
ShieldInput,
TelemetryConfig,
ToolGroupInput,
@@ -188,6 +189,7 @@ class RunConfigSettings(BaseModel):
default_datasets: list[DatasetInput] | None = None
default_benchmarks: list[BenchmarkInput] | None = None
vector_stores_config: VectorStoresConfig | None = None
safety_config: SafetyConfig | None = None
telemetry: TelemetryConfig = Field(default_factory=lambda: TelemetryConfig(enabled=True))
storage_backends: dict[str, Any] | None = None
storage_stores: dict[str, Any] | None = None
@@ -290,6 +292,9 @@ class RunConfigSettings(BaseModel):
if self.vector_stores_config:
config["vector_stores"] = self.vector_stores_config.model_dump(exclude_none=True)
if self.safety_config:
config["safety"] = self.safety_config.model_dump(exclude_none=True)
return config