From 91b43600f73985146c47cd88f453525ef3e744dd Mon Sep 17 00:00:00 2001
From: Kate Plawiak
Date: Mon, 22 Jul 2024 13:58:51 -0700
Subject: [PATCH] increase max_new_tokens

---
 llama_toolchain/safety/shields/llama_guard.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/llama_toolchain/safety/shields/llama_guard.py b/llama_toolchain/safety/shields/llama_guard.py
index a63d71844..dc7151a3e 100644
--- a/llama_toolchain/safety/shields/llama_guard.py
+++ b/llama_toolchain/safety/shields/llama_guard.py
@@ -230,7 +230,7 @@ class LlamaGuardShield(ShieldBase):
         prompt_len = input_ids.shape[1]
         output = self.model.generate(
             input_ids=input_ids,
-            max_new_tokens=20,
+            max_new_tokens=50,
             output_scores=True,
             return_dict_in_generate=True,
             pad_token_id=0,
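
For context, the sketch below shows roughly how a generate call like the one in the hunk is used, assuming the standard Hugging Face transformers API. The checkpoint name, prompt, and decode step are illustrative assumptions, not taken from llama_guard.py; the point is that a Llama Guard verdict ("safe", or "unsafe" plus violated category codes) can exceed 20 new tokens, so the higher cap avoids truncating the text the shield later parses.

# Minimal sketch, not the shield's actual implementation (assumes Hugging Face transformers).
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "meta-llama/Meta-Llama-Guard-2-8B"  # assumption: any Llama Guard checkpoint
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id, torch_dtype=torch.bfloat16)

prompt = "..."  # in the shield, this is built from the conversation being checked
input_ids = tokenizer(prompt, return_tensors="pt").input_ids
prompt_len = input_ids.shape[1]

output = model.generate(
    input_ids=input_ids,
    max_new_tokens=50,  # was 20; longer multi-category verdicts were being cut off
    output_scores=True,
    return_dict_in_generate=True,
    pad_token_id=0,
)

# Decode only the newly generated tokens, skipping the prompt.
response = tokenizer.decode(output.sequences[0][prompt_len:], skip_special_tokens=True)
print(response)  # e.g. "safe" or "unsafe\nS1"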