Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-24 18:24:20 +00:00)
This reverts commit 2afd922f8c.

parent 2afd922f8c
commit 6b5f093087

2 changed files with 4 additions and 95 deletions
@@ -331,7 +331,7 @@ def _gemini_convert_messages_with_history(  # noqa: PLR0915
         raise e


-def _transform_request_body(  # noqa: PLR0915
+def _transform_request_body(
     messages: List[AllMessageValues],
     model: str,
     optional_params: dict,
@@ -342,35 +342,13 @@ def _transform_request_body(  # noqa: PLR0915
     """
     Common transformation logic across sync + async Gemini /generateContent calls.
     """
-    # Duplicate system message as user message for Gemini
-    duplicate_system_as_user = optional_params.pop("duplicate_system_as_user_for_gemini", True)
-
-    # Check if all messages are system messages
-    all_system_messages = all(message["role"] == "system" for message in messages)
-
     # Separate system prompt from rest of message
     supports_system_message = get_supports_system_message(
         model=model, custom_llm_provider=custom_llm_provider
     )
-
-    system_instructions = None
-    # If all messages are system messages, add a user message to the end
-    if (all_system_messages and supports_system_message and messages):
-        # Always create system instruction
-        system_content = messages[0].get("content", "")
-        system_part = PartType(text=system_content)  # type: ignore
-        system_instructions = SystemInstructions(parts=[system_part])
-
-        # Only duplicate as user message if flag is set
-        if duplicate_system_as_user or litellm.modify_params:
-            user_message = cast(AllMessageValues, {
-                "role": "user",
-                "content": system_content
-            })
-            messages = [user_message]
-        else:
-            messages = []
-
+    system_instructions, messages = _transform_system_message(
+        supports_system_message=supports_system_message, messages=messages
+    )
     # Checks for 'response_schema' support - if passed in
     if "response_schema" in optional_params:
         supports_response_schema = get_supports_response_schema(
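The deleted block above special-cased conversations made up entirely of system messages: it lifted the first system message into Gemini's system_instruction and, unless duplicate_system_as_user_for_gemini was False (with litellm.modify_params also off), replayed the same text as a user turn so the outgoing request still had non-empty contents. Below is a minimal, self-contained sketch of that reverted control flow; the function name split_system_messages is illustrative, plain dicts stand in for litellm's PartType and SystemInstructions types, and modify_params is taken as an argument rather than read from the litellm module.

from typing import List, Optional, Tuple

def split_system_messages(
    messages: List[dict],
    duplicate_system_as_user: bool = True,
    modify_params: bool = False,
) -> Tuple[Optional[dict], List[dict]]:
    # When every message is a system message, promote the first one to a
    # system instruction; optionally replay its text as a user turn so the
    # request still carries non-empty contents.
    all_system = all(m["role"] == "system" for m in messages)
    system_instruction = None
    if all_system and messages:
        system_content = messages[0].get("content", "")
        system_instruction = {"parts": [{"text": system_content}]}
        if duplicate_system_as_user or modify_params:
            messages = [{"role": "user", "content": system_content}]
        else:
            messages = []  # Gemini may reject a request with empty contents
    return system_instruction, messages

# Default behavior: the system text is duplicated as a user turn.
si, msgs = split_system_messages([{"role": "system", "content": "Be terse."}])
assert msgs == [{"role": "user", "content": "Be terse."}]

# With duplication disabled (and modify_params off), contents end up empty.
si, msgs = split_system_messages(
    [{"role": "system", "content": "Be terse."}],
    duplicate_system_as_user=False,
)
assert msgs == []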
@@ -8,7 +8,6 @@ from litellm import ModelResponse
 from litellm.llms.vertex_ai.gemini.vertex_and_google_ai_studio_gemini import (
     VertexGeminiConfig,
 )
-from litellm.llms.vertex_ai.gemini.transformation import _transform_request_body
 from litellm.types.utils import ChoiceLogprobs


@@ -63,71 +62,3 @@ def test_get_model_name_from_gemini_spec_model():
     model = "gemini/ft-uuid-123"
     result = VertexGeminiConfig._get_model_name_from_gemini_spec_model(model)
     assert result == "ft-uuid-123"
-
-def test_system_message_conversion_gemini():
-    """Test that system-only messages are properly handled for Gemini"""
-    # Case 1: Default behavior - duplicate system as user
-    messages = [{"role": "system", "content": "You are a helpful assistant"}]
-
-    # Create mock objects for the test
-    model = "gemini-2.0-flash"
-    custom_llm_provider = "gemini"
-    optional_params = {}
-    litellm_params = {}
-
-    result = _transform_request_body(
-        messages=messages,  # type: ignore
-        model=model,
-        optional_params=optional_params,
-        custom_llm_provider=custom_llm_provider,
-        litellm_params=litellm_params,
-        cached_content=None
-    )
-
-    # Verify that contents has user message
-    assert len(result["contents"]) > 0
-    assert result["contents"][0]["role"] == "user"  # type: ignore
-    assert "system_instruction" in result
-
-    # Case 2: Disable duplication
-    optional_params = {"duplicate_system_as_user_for_gemini": False}
-
-    # Save original modify_params value
-    original_modify_params = litellm.modify_params
-    litellm.modify_params = False
-
-    result_no_duplicate = _transform_request_body(
-        messages=messages.copy(),  # type: ignore
-        model=model,
-        optional_params=optional_params,
-        custom_llm_provider=custom_llm_provider,
-        litellm_params={},
-        cached_content=None
-    )
-
-    # Restore original modify_params value
-    litellm.modify_params = original_modify_params
-
-    # With duplication disabled and modify_params False,
-    # we'd expect an empty contents field
-    # This might actually raise an exception in practice
-    assert "system_instruction" in result_no_duplicate
-
-    # Case 3: With litellm.modify_params=True it should duplicate even with parameter set to False
-    litellm.modify_params = True
-
-    result_with_modify_params = _transform_request_body(
-        messages=messages.copy(),  # type: ignore
-        model=model,
-        optional_params={"duplicate_system_as_user_for_gemini": False},
-        custom_llm_provider=custom_llm_provider,
-        litellm_params={},
-        cached_content=None
-    )
-
-    # Restore original modify_params value
-    litellm.modify_params = original_modify_params
-
-    # Verify that contents has user message due to modify_params=True
-    assert len(result_with_modify_params["contents"]) > 0
-    assert result_with_modify_params["contents"][0]["role"] == "user"  # type: ignore
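One structural note on the removed test: it saved and restored the global litellm.modify_params by hand, which leaks the mutated value into later tests if an assertion fails between the set and the restore. Were a test like this reinstated, pytest's monkeypatch fixture provides the same toggle with guaranteed cleanup; a brief sketch (the test name and assertion are placeholders, not litellm's actual post-revert behavior):

import litellm

def test_modify_params_toggle(monkeypatch):
    # monkeypatch undoes this attribute change when the test exits,
    # even if an assertion below fails first.
    monkeypatch.setattr(litellm, "modify_params", True)
    assert litellm.modify_params is True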