forked from phoenix-oss/llama-stack-mirror
Remove the "ShieldType" concept (#430)
# What does this PR do?

This PR kills the notion of "ShieldType". The impetus was the realization:

> Why is the keyword llama-guard appearing so many times everywhere, sometimes with hyphens, sometimes with underscores?

Now that we have "provider-specific resource identifiers" and "user-specific aliases" for them, and this already works for models ("Llama3.1-8B-Instruct" <> "fireworks/llama-v3p1-..."), we can follow the same rules for Shields. Each Safety provider defines the identifiers it has registered; Bedrock already does this correctly, so we just generalize it for Llama Guard, Prompt Guard, etc. For Llama Guard, we simplify further by adopting the underlying model name itself as the identifier. No confusion necessary.

While doing this, I noticed a bug in our DistributionRegistry where we weren't scoping identifiers by type. Fixed.

## Feature/Issue validation/testing/test plan

Ran the (inference, safety, memory, agents) tests with the ollama and fireworks providers.
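To make the new calling convention concrete, here is a minimal sketch of registering and invoking a shield purely by identifier. It mirrors the `register_shield` signature and the `run_shield` call visible in the diff below; the base URL, provider id, and import paths are illustrative assumptions, not part of this change.

```python
# Minimal sketch, not part of this diff: assumes a running stack at an
# illustrative URL and the ShieldsClient / SafetyClient classes touched below.
import asyncio

from llama_models.llama3.api.datatypes import UserMessage  # assumed import path

from llama_stack.apis.safety.client import SafetyClient    # assumed import path
from llama_stack.apis.shields.client import ShieldsClient  # assumed import path


async def main() -> None:
    shields = ShieldsClient("http://localhost:5000")
    safety = SafetyClient("http://localhost:5000")

    # The shield identifier is simply the underlying model name; a provider-side
    # resource id can still be attached, exactly as model aliases work.
    await shields.register_shield(
        shield_id="Llama-Guard-3-1B",
        provider_shield_id=None,
        provider_id="llama-guard",  # illustrative provider id
        params=None,
    )

    # No ShieldType anywhere: run_shield takes the same identifier.
    response = await safety.run_shield(
        shield_id="Llama-Guard-3-1B",
        messages=[UserMessage(content="hello world")],
    )
    print(response)


asyncio.run(main())
```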
parent 09269e2a44
commit 983d6ce2df
26 changed files with 150 additions and 209 deletions
```diff
@@ -38,7 +38,6 @@ class Dataset(CommonDatasetFields, Resource):
         return self.provider_resource_id


-@json_schema_type
 class DatasetInput(CommonDatasetFields, BaseModel):
     dataset_id: str
     provider_id: Optional[str] = None
```
```diff
@@ -34,7 +34,6 @@ class EvalTask(CommonEvalTaskFields, Resource):
         return self.provider_resource_id


-@json_schema_type
 class EvalTaskInput(CommonEvalTaskFields, BaseModel):
     eval_task_id: str
     provider_id: Optional[str] = None
```
```diff
@@ -122,7 +122,6 @@ MemoryBank = Annotated[
 ]


-@json_schema_type
 class MemoryBankInput(BaseModel):
     memory_bank_id: str
     params: BankParams
```
```diff
@@ -32,7 +32,6 @@ class Model(CommonModelFields, Resource):
         return self.provider_resource_id


-@json_schema_type
 class ModelInput(CommonModelFields):
     model_id: str
     provider_id: Optional[str] = None
```
```diff
@@ -27,7 +27,7 @@ async def get_client_impl(config: RemoteProviderConfig, _deps: Any) -> Safety:


 def encodable_dict(d: BaseModel):
-    return json.loads(d.json())
+    return json.loads(d.model_dump_json())


 class SafetyClient(Safety):
```
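As an aside, the one-line change in `encodable_dict` above swaps the Pydantic v1 serialization call for its v2 counterpart: `.json()` was renamed to `.model_dump_json()` in Pydantic v2. A standalone illustration (not from this repo):

```python
from pydantic import BaseModel


class Example(BaseModel):
    shield_id: str


e = Example(shield_id="Llama-Guard-3-1B")
# Pydantic v1 spelling (deprecated in v2): e.json()
# Pydantic v2 spelling (as used above):    e.model_dump_json()
print(e.model_dump_json())  # {"shield_id":"Llama-Guard-3-1B"}
```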
```diff
@@ -80,7 +80,7 @@ async def run_main(host: str, port: int, image_path: str = None):
     )
     cprint(f"User>{message.content}", "green")
     response = await client.run_shield(
-        shield_id="llama_guard",
+        shield_id="Llama-Guard-3-1B",
         messages=[message],
     )
     print(response)
```
```diff
@@ -96,7 +96,6 @@ class ScoringFn(CommonScoringFnFields, Resource):
         return self.provider_resource_id


-@json_schema_type
 class ScoringFnInput(CommonScoringFnFields, BaseModel):
     scoring_fn_id: str
     provider_id: Optional[str] = None
```
```diff
@@ -37,7 +37,6 @@ class ShieldsClient(Shields):
     async def register_shield(
         self,
         shield_id: str,
-        shield_type: ShieldType,
         provider_shield_id: Optional[str],
         provider_id: Optional[str],
         params: Optional[Dict[str, Any]],
```
```diff
@@ -47,7 +46,6 @@ class ShieldsClient(Shields):
                 f"{self.base_url}/shields/register",
                 json={
                     "shield_id": shield_id,
-                    "shield_type": shield_type,
                     "provider_shield_id": provider_shield_id,
                     "provider_id": provider_id,
                     "params": params,
```
```diff
@@ -56,12 +54,12 @@ class ShieldsClient(Shields):
             )
             response.raise_for_status()

-    async def get_shield(self, shield_type: str) -> Optional[Shield]:
+    async def get_shield(self, shield_id: str) -> Optional[Shield]:
         async with httpx.AsyncClient() as client:
             response = await client.get(
                 f"{self.base_url}/shields/get",
                 params={
-                    "shield_type": shield_type,
+                    "shield_id": shield_id,
                 },
                 headers={"Content-Type": "application/json"},
             )
```
```diff
@@ -4,7 +4,6 @@
 # This source code is licensed under the terms described in the LICENSE file in
 # the root directory of this source tree.

-from enum import Enum
 from typing import Any, Dict, List, Literal, Optional, Protocol, runtime_checkable

 from llama_models.schema_utils import json_schema_type, webmethod
```
```diff
@@ -13,16 +12,7 @@ from pydantic import BaseModel
 from llama_stack.apis.resource import Resource, ResourceType


-@json_schema_type
-class ShieldType(Enum):
-    generic_content_shield = "generic_content_shield"
-    llama_guard = "llama_guard"
-    code_scanner = "code_scanner"
-    prompt_guard = "prompt_guard"
-
-
 class CommonShieldFields(BaseModel):
-    shield_type: ShieldType
     params: Optional[Dict[str, Any]] = None

```
```diff
@@ -59,7 +49,6 @@ class Shields(Protocol):
     async def register_shield(
         self,
         shield_id: str,
-        shield_type: ShieldType,
         provider_shield_id: Optional[str] = None,
         provider_id: Optional[str] = None,
         params: Optional[Dict[str, Any]] = None,
```