mirror of
https://github.com/BerriAI/litellm.git
synced 2025-04-26 19:24:27 +00:00
support multiple system message translation for bedrock claude-3
This commit is contained in:
parent
6eac5c4f0a
commit
45d31e33aa
1 changed file with 7 additions and 6 deletions
|
@ -720,14 +720,15 @@ def completion(
|
||||||
if provider == "anthropic":
|
if provider == "anthropic":
|
||||||
if model.startswith("anthropic.claude-3"):
|
if model.startswith("anthropic.claude-3"):
|
||||||
# Separate system prompt from rest of message
|
# Separate system prompt from rest of message
|
||||||
system_prompt_idx: Optional[int] = None
|
system_prompt_idx: list[int] = []
|
||||||
|
system_messages: list[str] = []
|
||||||
for idx, message in enumerate(messages):
|
for idx, message in enumerate(messages):
|
||||||
if message["role"] == "system":
|
if message["role"] == "system":
|
||||||
inference_params["system"] = message["content"]
|
system_messages.append(message["content"])
|
||||||
system_prompt_idx = idx
|
system_prompt_idx.append(idx)
|
||||||
break
|
if len(system_prompt_idx) > 0:
|
||||||
if system_prompt_idx is not None:
|
inference_params["system"] = '\n'.join(system_messages)
|
||||||
messages.pop(system_prompt_idx)
|
messages = [i for j, i in enumerate(messages) if j not in system_prompt_idx]
|
||||||
# Format rest of message according to anthropic guidelines
|
# Format rest of message according to anthropic guidelines
|
||||||
messages = prompt_factory(
|
messages = prompt_factory(
|
||||||
model=model, messages=messages, custom_llm_provider="anthropic"
|
model=model, messages=messages, custom_llm_provider="anthropic"
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue