fix(factory.py): handle message content being a list instead of string

Fixes https://github.com/BerriAI/litellm/issues/4679
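
For context: string content concatenates straight into a custom prompt template, but OpenAI-style list content (blocks of {"type": "text", "text": ...}) cannot be added to a string. A minimal repro sketch of the failure mode; the message and the template markers below are illustrative, not taken from issue #4679:

# Illustrative message with list-form content (not copied from the issue).
message = {
    "role": "user",
    "content": [{"type": "text", "text": "hey, how's it going?"}],
}

pre_message_str, post_message_str = "[INST] ", " [/INST]"
try:
    # Pre-fix behavior in custom_prompt: str + list raises TypeError.
    prompt = pre_message_str + message["content"] + post_message_str
except TypeError as err:
    print(f"old behavior: {err}")  # can only concatenate str (not "list") to str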
Krrish Dholakia, 2024-07-12 18:59:26 -07:00
parent 70b96d12e9, commit 0decc36bed
2 changed files with 23 additions and 4 deletions


@@ -2393,7 +2393,16 @@ def custom_prompt(
             if role in role_dict and "post_message" in role_dict[role]
             else ""
         )
-        prompt += pre_message_str + message["content"] + post_message_str
+        if isinstance(message["content"], str):
+            prompt += pre_message_str + message["content"] + post_message_str
+        elif isinstance(message["content"], list):
+            text_str = ""
+            for content in message["content"]:
+                if content.get("text", None) is not None and isinstance(
+                    content["text"], str
+                ):
+                    text_str += content["text"]
+            prompt += pre_message_str + text_str + post_message_str
         if role == "assistant":
             prompt += eos_token
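
The same list-flattening logic, pulled out as a standalone sketch for clarity; the helper name flatten_content is illustrative and not part of litellm:

from typing import Dict, List, Union


def flatten_content(content: Union[str, List[Dict]]) -> str:
    """Return string content unchanged; join the "text" fields of list content."""
    if isinstance(content, str):
        return content
    text_str = ""
    for block in content:
        # Only blocks with a string "text" field contribute; other block types are skipped.
        if isinstance(block.get("text", None), str):
            text_str += block["text"]
    return text_str


assert flatten_content("You are an AI") == "You are an AI"
assert flatten_content(
    [{"type": "text", "text": "You are "}, {"type": "text", "text": "an AI"}]
) == "You are an AI"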


@@ -599,9 +599,19 @@ def test_bedrock_claude_3(image_url):
 @pytest.mark.parametrize(
-    "system", ["You are an AI", [{"type": "text", "text": "You are an AI"}]]
+    "system",
+    ["You are an AI", [{"type": "text", "text": "You are an AI"}]],
 )
-def test_bedrock_claude_3_system_prompt(system):
+@pytest.mark.parametrize(
+    "model",
+    [
+        "anthropic.claude-3-sonnet-20240229-v1:0",
+        "meta.llama3-70b-instruct-v1:0",
+        "anthropic.claude-v2",
+        "mistral.mixtral-8x7b-instruct-v0:1",
+    ],
+)
+def test_bedrock_system_prompt(system, model):
     try:
         litellm.set_verbose = True
         data = {
@@ -614,7 +624,7 @@ def test_bedrock_claude_3_system_prompt(system):
             ],
         }
         response: ModelResponse = completion(
-            model="bedrock/anthropic.claude-3-sonnet-20240229-v1:0",
+            model="bedrock/{}".format(model),
            **data,
        ) # type: ignore
        # Add any assertions here to check the response
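
A hedged usage sketch mirroring what the updated test exercises: a list-form system prompt sent through one of the parametrized Bedrock models. This assumes AWS credentials are configured in the environment; it is not part of the commit.

import litellm
from litellm import completion

litellm.set_verbose = True

response = completion(
    model="bedrock/meta.llama3-70b-instruct-v1:0",
    max_tokens=100,
    stream=False,
    temperature=0.3,
    messages=[
        {"role": "system", "content": [{"type": "text", "text": "You are an AI"}]},
        {"role": "user", "content": "hey, how's it going?"},
    ],
)
print(response.choices[0].message.content)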