From ce27e08e7df970c9be7dda7bf8e4641fe8e4d4ce Mon Sep 17 00:00:00 2001
From: Krrish Dholakia
Date: Tue, 7 Nov 2023 21:32:35 -0800
Subject: [PATCH] (fix): llama-2 non-chat models prompt template

---
 litellm/llms/prompt_templates/factory.py | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/litellm/llms/prompt_templates/factory.py b/litellm/llms/prompt_templates/factory.py
index 959b8759f4..4b616b785b 100644
--- a/litellm/llms/prompt_templates/factory.py
+++ b/litellm/llms/prompt_templates/factory.py
@@ -279,9 +279,8 @@ def prompt_factory(model: str, messages: list, custom_llm_provider: Optional[str
             return anthropic_pt(messages=messages)
     try:
-        if "meta-llama/llama-2" in model:
-            if "chat" in model:
-                return llama_2_chat_pt(messages=messages)
+        if "meta-llama/llama-2" in model and "chat" in model:
+            return llama_2_chat_pt(messages=messages)
         elif "tiiuae/falcon" in model: # Note: for the instruct models, it's best to use a User: .., Assistant:.. approach in your prompt template.
             if model == "tiiuae/falcon-180B-chat":
                 return falcon_chat_pt(messages=messages)
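
For context, a minimal standalone sketch of the control-flow change (the return values are placeholder strings and the final else-style fallback is an assumption about the surrounding function, not litellm's actual implementation): with the old nested `if`, a non-chat model such as `meta-llama/llama-2-7b` entered the outer branch, failed the inner `"chat" in model` check, and skipped the rest of the `elif`/`else` chain, so no template was selected at this point; combining the two checks with `and` lets non-chat llama-2 models fall through to the remaining branches.

```python
# Hypothetical, self-contained sketch; branch bodies are placeholder strings,
# not litellm's real prompt-template functions.

def old_routing(model: str):
    # Before the patch: a non-chat llama-2 model takes the outer branch,
    # fails the inner "chat" check, and skips every later elif/else,
    # so nothing is returned from this chain.
    if "meta-llama/llama-2" in model:
        if "chat" in model:
            return "llama_2_chat_pt"
    elif "tiiuae/falcon" in model:
        return "falcon_pt"
    else:
        return "default_pt"  # stand-in for the generic fallback

def new_routing(model: str):
    # After the patch: only llama-2 *chat* models match the first branch;
    # non-chat variants fall through to the remaining checks and the fallback.
    if "meta-llama/llama-2" in model and "chat" in model:
        return "llama_2_chat_pt"
    elif "tiiuae/falcon" in model:
        return "falcon_pt"
    else:
        return "default_pt"

print(old_routing("meta-llama/llama-2-7b"))       # None -- fell through the chain
print(new_routing("meta-llama/llama-2-7b"))       # default_pt
print(new_routing("meta-llama/llama-2-7b-chat"))  # llama_2_chat_pt
```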