Use inference APIs for executing Llama Guard

This commit is contained in:
Ashwin Bharambe 2024-09-25 19:40:49 -07:00
parent 6236634d84
commit 37ca22cda6
3 changed files with 94 additions and 164 deletions

View file

@@ -88,10 +88,10 @@ class MetaReferenceSafetyImpl(Safety):
         assert (
             cfg is not None
         ), "Cannot use LlamaGuardShield since not present in config"
-        model_dir = resolve_and_get_path(cfg.model)
         return LlamaGuardShield(
-            model_dir=model_dir,
+            model=cfg.model,
+            inference_api=self.inference_api,
             excluded_categories=cfg.excluded_categories,
             disable_input_check=cfg.disable_input_check,
             disable_output_check=cfg.disable_output_check,