diff --git a/litellm/llms/prompt_templates/factory.py b/litellm/llms/prompt_templates/factory.py
index 43fbe8724..7b652a398 100644
--- a/litellm/llms/prompt_templates/factory.py
+++ b/litellm/llms/prompt_templates/factory.py
@@ -99,12 +99,16 @@ def ollama_pt(
 
 
 def mistral_instruct_pt(messages):
+    # Following the Mistral example from https://huggingface.co/docs/transformers/main/chat_templating
     prompt = custom_prompt(
        initial_prompt_value="",
        role_dict={
-            "system": {"pre_message": "[INST]", "post_message": "[/INST]"},
-            "user": {"pre_message": "[INST]", "post_message": "[/INST]"},
-            "assistant": {"pre_message": "[INST]", "post_message": "[/INST]"},
+            "system": {
+                "pre_message": "[INST] <>\n",
+                "post_message": "<> [/INST]\n",
+            },
+            "user": {"pre_message": "[INST] ", "post_message": " [/INST]\n"},
+            "assistant": {"pre_message": " ", "post_message": " "},
        },
        final_prompt_value="",
        messages=messages,
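
For reference, below is a minimal standalone sketch of how the new role_dict is expected to shape the rendered prompt. The render_with_role_dict helper is a hypothetical stand-in for custom_prompt (assumed to concatenate pre_message + content + post_message for each message, between initial_prompt_value and final_prompt_value); it is not litellm's implementation, only an illustration of the template change above.

# Hypothetical sketch, not litellm's custom_prompt: wraps each message's
# content with its role's pre_message/post_message and concatenates the result.
def render_with_role_dict(messages, role_dict, initial_prompt_value="", final_prompt_value=""):
    prompt = initial_prompt_value
    for message in messages:
        wrap = role_dict.get(message["role"], {"pre_message": "", "post_message": ""})
        prompt += wrap["pre_message"] + message["content"] + wrap["post_message"]
    return prompt + final_prompt_value

# The role_dict proposed in this diff.
role_dict = {
    "system": {"pre_message": "[INST] <>\n", "post_message": "<> [/INST]\n"},
    "user": {"pre_message": "[INST] ", "post_message": " [/INST]\n"},
    "assistant": {"pre_message": " ", "post_message": " "},
}

messages = [
    {"role": "user", "content": "Hello, how are you?"},
    {"role": "assistant", "content": "Doing well, thanks!"},
    {"role": "user", "content": "What is the capital of France?"},
]

print(render_with_role_dict(messages, role_dict))
# Expected output (roughly):
# [INST] Hello, how are you? [/INST]
#  Doing well, thanks! [INST] What is the capital of France? [/INST]

With the old role_dict, every role was wrapped in [INST] ... [/INST], so assistant turns were indistinguishable from user turns; the new mapping wraps only user/system content in instruction tags and leaves assistant content bare, which is in line with the Mistral chat-templating format the comment links to.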