(fix): llama-2 non-chat models prompt template

Krrish Dholakia 2023-11-07 21:32:35 -08:00
parent 4ca4038246
commit ce27e08e7d


@@ -279,8 +279,7 @@ def prompt_factory(model: str, messages: list, custom_llm_provider: Optional[str
             return anthropic_pt(messages=messages)
         try:
-            if "meta-llama/llama-2" in model:
-                if "chat" in model:
-                    return llama_2_chat_pt(messages=messages)
+            if "meta-llama/llama-2" in model and "chat" in model:
+                return llama_2_chat_pt(messages=messages)
             elif "tiiuae/falcon" in model: # Note: for the instruct models, it's best to use a User: .., Assistant:.. approach in your prompt template.
                 if model == "tiiuae/falcon-180B-chat":