Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-27 19:54:13 +00:00
fix(anthropic.py): handle multiple system prompts
parent 275e0b94c9
commit 4dad400b57

3 changed files with 45 additions and 14 deletions
litellm/llms/anthropic.py

@@ -131,18 +131,24 @@ def completion(
         )
     else:
         # Separate system prompt from rest of message
-        system_prompt_idx: Optional[int] = None
+        system_prompt_indices = []
+        system_prompt = ""
         for idx, message in enumerate(messages):
             if message["role"] == "system":
-                optional_params["system"] = message["content"]
-                system_prompt_idx = idx
-                break
-        if system_prompt_idx is not None:
-            messages.pop(system_prompt_idx)
+                system_prompt += message["content"]
+                system_prompt_indices.append(idx)
+        if len(system_prompt_indices) > 0:
+            for idx in reversed(system_prompt_indices):
+                messages.pop(idx)
+        if len(system_prompt) > 0:
+            optional_params["system"] = system_prompt
         # Format rest of message according to anthropic guidelines
-        messages = prompt_factory(
-            model=model, messages=messages, custom_llm_provider="anthropic"
-        )
+        try:
+            messages = prompt_factory(
+                model=model, messages=messages, custom_llm_provider="anthropic"
+            )
+        except Exception as e:
+            raise AnthropicError(status_code=400, message=str(e))
 
     ## Load Config
     config = litellm.AnthropicConfig.get_config()
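Note: a minimal standalone sketch of what the new extraction loop does when a request carries more than one system message; example_messages and the prompt strings are invented for illustration, and the snippet runs outside completion():

# Hypothetical input: two system prompts followed by a user turn.
example_messages = [
    {"role": "system", "content": "You are terse. "},
    {"role": "system", "content": "Answer in French."},
    {"role": "user", "content": "Hi"},
]

optional_params = {}
system_prompt = ""
system_prompt_indices = []
for idx, message in enumerate(example_messages):
    if message["role"] == "system":
        # Concatenate every system prompt instead of keeping only the first.
        system_prompt += message["content"]
        system_prompt_indices.append(idx)
# Pop from the end so earlier indices stay valid.
for idx in reversed(system_prompt_indices):
    example_messages.pop(idx)
if len(system_prompt) > 0:
    optional_params["system"] = system_prompt

print(optional_params["system"])  # You are terse. Answer in French.
print(example_messages)  # [{'role': 'user', 'content': 'Hi'}]

The removed lines kept only the first system message (note the dropped break) and popped just that index; the new loop concatenates every system entry and strips them all before prompt_factory runs, and the added try/except now converts formatting failures into a 400 AnthropicError.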
litellm/llms/prompt_templates/factory.py

@@ -5,6 +5,7 @@ from jinja2 import Template, exceptions, Environment, meta
 from typing import Optional, Any
 import imghdr, base64
 from typing import List
+import litellm
 
 
 def default_pt(messages):

@@ -642,11 +643,12 @@ def anthropic_messages_pt(messages: list):
     """
     # add role=tool support to allow function call result/error submission
     user_message_types = {"user", "tool"}
-    # reformat messages to ensure user/assistant are alternating, if there's either 2 consecutive 'user' messages or 2 consecutive 'assistant' message, add a blank 'user' or 'assistant' message to ensure compatibility
+    # reformat messages to ensure user/assistant are alternating, if there's either 2 consecutive 'user' messages or 2 consecutive 'assistant' message, merge them.
     new_messages = []
     msg_i = 0
     while msg_i < len(messages):
         user_content = []
+        ## MERGE CONSECUTIVE USER CONTENT ##
         while msg_i < len(messages) and messages[msg_i]["role"] in user_message_types:
             if isinstance(messages[msg_i]["content"], list):
                 for m in messages[msg_i]["content"]:

@@ -680,6 +682,7 @@ def anthropic_messages_pt(messages: list):
             new_messages.append({"role": "user", "content": user_content})
 
         assistant_content = []
+        ## MERGE CONSECUTIVE ASSISTANT CONTENT ##
         while msg_i < len(messages) and messages[msg_i]["role"] == "assistant":
             assistant_text = (
                 messages[msg_i].get("content") or ""
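Note: the ## MERGE ... ## comments added above label reshaping logic that already exists in anthropic_messages_pt. A simplified, hedged sketch of that behavior (image blocks and tool results are omitted, and merge_consecutive_turns is an invented name):

def merge_consecutive_turns(messages: list) -> list:
    """Toy version of the alternating-role reshaping for Anthropic."""
    user_message_types = {"user", "tool"}
    new_messages = []
    msg_i = 0
    while msg_i < len(messages):
        user_content = []
        # Collapse consecutive user/tool turns into one content list.
        while msg_i < len(messages) and messages[msg_i]["role"] in user_message_types:
            user_content.append({"type": "text", "text": messages[msg_i]["content"]})
            msg_i += 1
        if user_content:
            new_messages.append({"role": "user", "content": user_content})

        assistant_content = []
        # Collapse consecutive assistant turns the same way.
        while msg_i < len(messages) and messages[msg_i]["role"] == "assistant":
            assistant_content.append(
                {"type": "text", "text": messages[msg_i].get("content") or ""}
            )
            msg_i += 1
        if assistant_content:
            new_messages.append({"role": "assistant", "content": assistant_content})

        if not user_content and not assistant_content:
            # Mirrors the invalid-role check added further down in this commit.
            raise Exception(f"Invalid role passed in - {messages[msg_i]}")
    return new_messages


# Two consecutive user turns become one Anthropic message with two text blocks.
print(
    merge_consecutive_turns(
        [
            {"role": "user", "content": "first"},
            {"role": "user", "content": "second"},
            {"role": "assistant", "content": "reply"},
        ]
    )
)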
@@ -697,10 +700,22 @@ def anthropic_messages_pt(messages: list):
         if assistant_content:
             new_messages.append({"role": "assistant", "content": assistant_content})
 
+        if (
+            msg_i < len(messages)
+            and messages[msg_i]["role"] != user_message_types
+            and messages[msg_i]["role"] != "assistant"
+        ):
+            raise Exception(f"Invalid role passed in - {messages[msg_i]}")
+
     if new_messages[0]["role"] != "user":
-        new_messages.insert(
-            0, {"role": "user", "content": [{"type": "text", "text": "."}]}
-        )
+        if litellm.modify_params:
+            new_messages.insert(
+                0, {"role": "user", "content": [{"type": "text", "text": "."}]}
+            )
+        else:
+            raise Exception(
+                "Invalid first message. Should always start with 'role'='user' for Anthropic. System prompt is sent separately for Anthropic. set 'litellm.modify_params = True' or 'litellm_settings:modify_params = True' on proxy, to insert a placeholder user message - '.' as the first message, "
+            )
 
     if new_messages[-1]["role"] == "assistant":
         for content in new_messages[-1]["content"]:
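Note: a hedged sketch of the new first-message guard in isolation. With litellm.modify_params enabled, a placeholder "." user turn is inserted; left unset (it defaults to False), the call now raises instead of silently patching the conversation:

import litellm

litellm.modify_params = True  # opt in to the placeholder behavior

new_messages = [{"role": "assistant", "content": [{"type": "text", "text": "hi"}]}]
if new_messages[0]["role"] != "user":
    if litellm.modify_params:
        # Insert a minimal "." user turn so the thread starts with role='user'.
        new_messages.insert(
            0, {"role": "user", "content": [{"type": "text", "text": "."}]}
        )
    else:
        raise Exception(
            "Invalid first message. Should always start with 'role'='user' for Anthropic."
        )

print(new_messages[0]["role"])  # user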
litellm/utils.py

@@ -7223,7 +7223,17 @@ def exception_type(
                         message=f"AnthropicException - {original_exception.message}",
                         llm_provider="anthropic",
                         model=model,
-                        response=original_exception.response,
+                        response=(
+                            original_exception.response
+                            if hasattr(original_exception, "response")
+                            else httpx.Response(
+                                status_code=500,
+                                request=httpx.Request(
+                                    method="POST",
+                                    url="https://docs.anthropic.com/claude/reference/messages_post",
+                                ),
+                            )
+                        ),
                     )
                 else:
                     exception_mapping_worked = True
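Note: the utils.py change guards against exception objects that carry no response attribute. A hedged, self-contained sketch of the same fallback (the exception instance is an invented stand-in; the fallback URL is the placeholder the diff itself uses):

import httpx

# Invented stand-in: an exception object with no .response attribute.
original_exception = Exception("AnthropicException - server error")

# Reuse the exception's response when present; otherwise synthesize a 500
# so downstream handlers always receive an httpx.Response.
response = (
    original_exception.response
    if hasattr(original_exception, "response")
    else httpx.Response(
        status_code=500,
        request=httpx.Request(
            method="POST",
            url="https://docs.anthropic.com/claude/reference/messages_post",
        ),
    )
)

print(response.status_code)  # 500 (the synthesized fallback)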