diff --git a/litellm/litellm_core_utils/prompt_templates/factory.py b/litellm/litellm_core_utils/prompt_templates/factory.py
index df7aa2cbd0..182f3618b8 100644
--- a/litellm/litellm_core_utils/prompt_templates/factory.py
+++ b/litellm/litellm_core_utils/prompt_templates/factory.py
@@ -219,6 +219,31 @@ def ollama_pt(
             if user_content_str:
                 prompt += f"### User:\n{user_content_str}\n\n"
 
+            system_content_str = ""
+            ## MERGE CONSECUTIVE SYSTEM CONTENT ##
+            while (
+                msg_i < len(messages) and messages[msg_i]["role"] == "system"
+            ):
+                msg_content = messages[msg_i].get("content")
+                if msg_content:
+                    if isinstance(msg_content, list):
+                        for m in msg_content:
+                            if m.get("type", "") == "image_url":
+                                if isinstance(m["image_url"], str):
+                                    images.append(m["image_url"])
+                                elif isinstance(m["image_url"], dict):
+                                    images.append(m["image_url"]["url"])
+                            elif m.get("type", "") == "text":
+                                system_content_str += m["text"]
+                    else:
+                        # System message content will always be a string
+                        system_content_str += msg_content
+
+                msg_i += 1
+
+            if system_content_str:
+                prompt += f"### System:\n{system_content_str}\n\n"
+
             assistant_content_str = ""
             ## MERGE CONSECUTIVE ASSISTANT CONTENT ##
             while msg_i < len(messages) and messages[msg_i]["role"] == "assistant":
diff --git a/tests/llm_translation/test_prompt_factory.py b/tests/llm_translation/test_prompt_factory.py
index 514b5f68f3..5d450f264b 100644
--- a/tests/llm_translation/test_prompt_factory.py
+++ b/tests/llm_translation/test_prompt_factory.py
@@ -743,3 +743,13 @@ def test_hf_chat_template():
         chat_template.rstrip()
         == """<|begin▁of▁sentence|>You are a helpful assistant.<|User|>What is the weather in Copenhagen?<|Assistant|>"""
     )
+
+
+def test_ollama_pt():
+    from litellm.litellm_core_utils.prompt_templates.factory import ollama_pt
+
+    messages = [
+        {"role": "system", "content": "You are a helpful assistant."},
+        {"role": "user", "content": "Hello!"},
+    ]
+    ollama_pt(model="ollama/llama3.1:8b", messages=messages)
\ No newline at end of file
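
For reviewers, a minimal sketch (not part of the patch) of how the new branch behaves. The return shape is an assumption; ollama_pt may return the rendered prompt string or a dict carrying "prompt" and "images", and the model name here is arbitrary. Because the system merge runs after the user merge inside the outer message loop, leading system messages are collapsed into a single "### System:" section that lands before the first "### User:" section.

# Hedged sketch: demonstrates merging of consecutive system messages.
from litellm.litellm_core_utils.prompt_templates.factory import ollama_pt

messages = [
    {"role": "system", "content": "You are terse."},
    {"role": "system", "content": " Answer in English."},
    {"role": "user", "content": "Hello!"},
]

result = ollama_pt(model="ollama/llama3.1:8b", messages=messages)
# Assumption: unwrap a {"prompt", "images"} dict if that is what is returned.
prompt = result["prompt"] if isinstance(result, dict) else result

# Expected shape, with both system strings merged into one section:
# ### System:
# You are terse. Answer in English.
#
# ### User:
# Hello!
print(prompt)

Before this change, ollama_pt had no branch for the "system" role, so a conversation with a system message either dropped it or failed to advance msg_i; the smoke test in the second hunk exercises exactly that previously unhandled path.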