Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-06-28 02:53:30 +00:00)
Don't validate prompt-guard anymore
commit 19ce6bf009
parent 703ab9385f

1 changed file with 0 additions and 14 deletions
@@ -50,20 +50,6 @@ class LlamaGuardShieldConfig(BaseModel):
 class PromptGuardShieldConfig(BaseModel):
     model: str = "Prompt-Guard-86M"
 
-    @validator("model")
-    @classmethod
-    def validate_model(cls, model: str) -> str:
-        permitted_models = [
-            m.descriptor()
-            for m in safety_models()
-            if m.core_model_id == CoreModelId.prompt_guard_86m
-        ]
-        if model not in permitted_models:
-            raise ValueError(
-                f"Invalid model: {model}. Must be one of {permitted_models}"
-            )
-        return model
-
 
 class SafetyConfig(BaseModel):
     llama_guard_shield: Optional[LlamaGuardShieldConfig] = None
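For context, a minimal sketch of the behavior change, assuming only pydantic and the class as shown in the diff (the surrounding imports and helpers such as safety_models are not reproduced here): before this commit, constructing the config with a model string outside the permitted Prompt-Guard-86M descriptors raised a ValueError; after it, any string is accepted as-is.

    from pydantic import BaseModel


    class PromptGuardShieldConfig(BaseModel):
        # After this commit the field is a plain string with a default;
        # no validator restricts it to the Prompt-Guard-86M descriptor.
        model: str = "Prompt-Guard-86M"


    # The default still works as before.
    print(PromptGuardShieldConfig().model)  # Prompt-Guard-86M

    # Previously this raised ValueError("Invalid model: ..."); now it is accepted.
    # "my-finetuned-guard" is a hypothetical model name used only for illustration.
    print(PromptGuardShieldConfig(model="my-finetuned-guard").model)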