diff --git a/litellm/llms/vertex_ai/gemini/transformation.py b/litellm/llms/vertex_ai/gemini/transformation.py
index e50954b8f9..18c6e54e8c 100644
--- a/litellm/llms/vertex_ai/gemini/transformation.py
+++ b/litellm/llms/vertex_ai/gemini/transformation.py
@@ -284,7 +284,7 @@ def _gemini_convert_messages_with_history(  # noqa: PLR0915
         raise e
 
 
-def _transform_request_body(
+def _transform_request_body(  # noqa: PLR0915
     messages: List[AllMessageValues],
     model: str,
     optional_params: dict,
@@ -295,13 +295,43 @@ def _transform_request_body(
     """
     Common transformation logic across sync + async Gemini /generateContent calls.
     """
+    # Optionally duplicate the system message as a user message, since the
+    # Gemini API rejects requests whose `contents` array is empty
+    duplicate_system_as_user = optional_params.pop(
+        "duplicate_system_as_user_for_gemini", True
+    )
+
+    # Check whether every message is a system message
+    all_system_messages = all(message["role"] == "system" for message in messages)
+
     # Separate system prompt from rest of message
     supports_system_message = get_supports_system_message(
         model=model, custom_llm_provider=custom_llm_provider
     )
-    system_instructions, messages = _transform_system_message(
-        supports_system_message=supports_system_message, messages=messages
-    )
+
+    system_instructions = None
+    if all_system_messages and supports_system_message and messages:
+        # System-only conversation: build the system instruction from the
+        # first system message
+        system_content = messages[0].get("content", "")
+        system_part = PartType(text=system_content)  # type: ignore
+        system_instructions = SystemInstructions(parts=[system_part])
+
+        # Replace the message list with a single user message carrying the
+        # same content, unless duplication is disabled and modify_params is off
+        if duplicate_system_as_user or litellm.modify_params:
+            user_message = cast(
+                AllMessageValues, {"role": "user", "content": system_content}
+            )
+            messages = [user_message]
+        else:
+            messages = []
+    else:
+        # Mixed conversations keep the standard system-message extraction
+        system_instructions, messages = _transform_system_message(
+            supports_system_message=supports_system_message, messages=messages
+        )
+
     # Checks for 'response_schema' support - if passed in
     if "response_schema" in optional_params:
         supports_response_schema = get_supports_response_schema(
diff --git a/tests/litellm/llms/vertex_ai/gemini/test_gemini_transformation.py b/tests/litellm/llms/vertex_ai/gemini/test_gemini_transformation.py
new file mode 100644
index 0000000000..cb9cdee0be
--- /dev/null
+++ b/tests/litellm/llms/vertex_ai/gemini/test_gemini_transformation.py
@@ -0,0 +1,71 @@
+import litellm
+from litellm.llms.vertex_ai.gemini.transformation import _transform_request_body
+
+
+def test_system_message_conversion_gemini():
+    """Test that system-only messages are properly handled for Gemini"""
+    # Case 1: default behavior - duplicate the system prompt as a user message
+    messages = [{"role": "system", "content": "You are a helpful assistant"}]
+
+    # Inputs for the call
+    model = "gemini-2.0-flash"
+    custom_llm_provider = "gemini"
+    optional_params = {}
+    litellm_params = {}
+
+    result = _transform_request_body(
+        messages=messages,  # type: ignore
+        model=model,
+        optional_params=optional_params,
+        custom_llm_provider=custom_llm_provider,
+        litellm_params=litellm_params,
+        cached_content=None,
+    )
+
+    # Verify the system prompt was duplicated into contents as a user message
+    assert len(result["contents"]) > 0
+    assert result["contents"][0]["role"] == "user"  # type: ignore
+    assert "system_instruction" in result
+
+    # Case 2: duplication disabled
+    optional_params = {"duplicate_system_as_user_for_gemini": False}
+
+    # Save original modify_params value
+    original_modify_params = litellm.modify_params
+    litellm.modify_params = False
+
+    result_no_duplicate = _transform_request_body(
+        messages=messages.copy(),  # type: ignore
+        model=model,
+        optional_params=optional_params,
+        custom_llm_provider=custom_llm_provider,
+        litellm_params={},
+        cached_content=None,
+    )
+
+    # Restore original modify_params value
+    litellm.modify_params = original_modify_params
+
+    # With duplication disabled and modify_params False, the system prompt is
+    # sent only as system_instruction and no user message is synthesized
+    # (note: the Gemini API may reject a request whose `contents` is empty)
+    assert "system_instruction" in result_no_duplicate
+
+    # Case 3: litellm.modify_params=True forces duplication even when the flag is False
+    litellm.modify_params = True
+
+    result_with_modify_params = _transform_request_body(
+        messages=messages.copy(),  # type: ignore
+        model=model,
+        optional_params={"duplicate_system_as_user_for_gemini": False},
+        custom_llm_provider=custom_llm_provider,
+        litellm_params={},
+        cached_content=None,
+    )
+
+    # Restore original modify_params value
+    litellm.modify_params = original_modify_params
+
+    # Verify that contents has a user message because modify_params=True
+    assert len(result_with_modify_params["contents"]) > 0
+    assert result_with_modify_params["contents"][0]["role"] == "user"  # type: ignore
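
Usage sketch for the new flag (a hedged example: it assumes, as with other
provider-specific options, that extra kwargs passed to `litellm.completion`
are forwarded into `optional_params`; the model name is illustrative):

    import litellm

    # Default behavior: a system-only prompt is also sent as a user turn,
    # because the Gemini API rejects requests whose `contents` array is empty
    litellm.completion(
        model="gemini/gemini-2.0-flash",
        messages=[{"role": "system", "content": "You are a helpful assistant"}],
    )

    # Opt out: the prompt is sent only as system_instruction; note that the
    # resulting request may be rejected, since `contents` will be empty
    litellm.completion(
        model="gemini/gemini-2.0-flash",
        messages=[{"role": "system", "content": "You are a helpful assistant"}],
        duplicate_system_as_user_for_gemini=False,
    )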