mirror of
https://github.com/BerriAI/litellm.git
synced 2025-04-26 11:14:04 +00:00
support multiple system message translation for bedrock claude-3
This commit is contained in:
parent
6eac5c4f0a
commit
45d31e33aa
1 changed files with 7 additions and 6 deletions
|
@@ -720,14 +720,15 @@ def completion(
|
|||
if provider == "anthropic":
|
||||
if model.startswith("anthropic.claude-3"):
|
||||
# Separate system prompt from rest of message
|
||||
system_prompt_idx: Optional[int] = None
|
||||
system_prompt_idx: list[int] = []
|
||||
system_messages: list[str] = []
|
||||
for idx, message in enumerate(messages):
|
||||
if message["role"] == "system":
|
||||
inference_params["system"] = message["content"]
|
||||
system_prompt_idx = idx
|
||||
break
|
||||
if system_prompt_idx is not None:
|
||||
messages.pop(system_prompt_idx)
|
||||
system_messages.append(message["content"])
|
||||
system_prompt_idx.append(idx)
|
||||
if len(system_prompt_idx) > 0:
|
||||
inference_params["system"] = '\n'.join(system_messages)
|
||||
messages = [i for j, i in enumerate(messages) if j not in system_prompt_idx]
|
||||
# Format rest of message according to anthropic guidelines
|
||||
messages = prompt_factory(
|
||||
model=model, messages=messages, custom_llm_provider="anthropic"
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue