Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-25 18:54:30 +00:00
(fix): llama-2 non-chat models prompt template
This commit is contained in:
parent
4ca4038246
commit
ce27e08e7d
1 changed file with 2 additions and 3 deletions
```diff
@@ -279,9 +279,8 @@ def prompt_factory(model: str, messages: list, custom_llm_provider: Optional[str
         return anthropic_pt(messages=messages)
     try:
-        if "meta-llama/llama-2" in model:
-            if "chat" in model:
-                return llama_2_chat_pt(messages=messages)
+        if "meta-llama/llama-2" in model and "chat" in model:
+            return llama_2_chat_pt(messages=messages)
         elif "tiiuae/falcon" in model: # Note: for the instruct models, it's best to use a User: .., Assistant:.. approach in your prompt template.
             if model == "tiiuae/falcon-180B-chat":
                 return falcon_chat_pt(messages=messages)
```
|
Loading…
Add table
Add a link
Reference in a new issue