fix(anthropic.py): support openai system message being a list

Krrish Dholakia 2024-07-23 21:44:24 -07:00
parent cb4c42e061
commit fb0a13c8bb
3 changed files with 14 additions and 6 deletions


@@ -780,7 +780,16 @@ class AnthropicChatCompletion(BaseLLM):
         system_prompt = ""
         for idx, message in enumerate(messages):
             if message["role"] == "system":
-                system_prompt += message["content"]
-                system_prompt_indices.append(idx)
+                valid_content: bool = False
+                if isinstance(message["content"], str):
+                    system_prompt += message["content"]
+                    valid_content = True
+                elif isinstance(message["content"], list):
+                    for content in message["content"]:
+                        system_prompt += content.get("text", "")
+                    valid_content = True
+
+                if valid_content:
+                    system_prompt_indices.append(idx)
         if len(system_prompt_indices) > 0:
             for idx in reversed(system_prompt_indices):
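The heart of the change in anthropic.py: list-form OpenAI system content is flattened into the single system prompt string the Anthropic API expects. Below is a standalone sketch of that flattening logic, for illustration only; extract_system_prompt is a hypothetical helper, not a litellm function.

from typing import Any, Dict, List


def extract_system_prompt(messages: List[Dict[str, Any]]) -> str:
    # System content may be a plain string or an OpenAI-style list of
    # content blocks; text blocks are concatenated, anything else is skipped.
    system_prompt = ""
    for message in messages:
        if message["role"] != "system":
            continue
        if isinstance(message["content"], str):
            system_prompt += message["content"]
        elif isinstance(message["content"], list):
            for block in message["content"]:
                system_prompt += block.get("text", "")
    return system_prompt


# Both forms produce the same system prompt.
assert extract_system_prompt(
    [{"role": "system", "content": "You are helpful."}]
) == extract_system_prompt(
    [{"role": "system", "content": [{"type": "text", "text": "You are helpful."}]}]
)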


@@ -1,8 +1,7 @@
 model_list:
-  - model_name: groq-llama3
+  - model_name: anthropic-claude
     litellm_params:
-      model: groq/llama3-groq-70b-8192-tool-use-preview
-      api_key: os.environ/GROQ_API_KEY
+      model: claude-3-haiku-20240307
 
 litellm_settings:
   callbacks: ["logfire"]


@@ -346,7 +346,7 @@ def test_completion_claude_3_empty_response():
     messages = [
         {
             "role": "system",
-            "content": "You are 2twNLGfqk4GMOn3ffp4p.",
+            "content": [{"type": "text", "text": "You are 2twNLGfqk4GMOn3ffp4p."}],
         },
         {"role": "user", "content": "Hi gm!", "name": "ishaan"},
         {"role": "assistant", "content": "Good morning! How are you doing today?"},