fix Llama models' message-to-prompt conversion for the AWS Bedrock provider

This commit is contained in:
aswny 2024-04-25 17:19:55 +00:00
parent d4d81dce01
commit 1e7a6c5d1f
2 changed files with 14 additions and 0 deletions

View file

@ -653,6 +653,10 @@ def convert_messages_to_prompt(model, messages, provider, custom_prompt_dict):
prompt = prompt_factory(
model=model, messages=messages, custom_llm_provider="bedrock"
)
elif provider == "meta":
prompt = prompt_factory(
model=model, messages=messages, custom_llm_provider="bedrock"
)
else:
prompt = ""
for message in messages: