add response format to signature

Ashwin Bharambe 2024-10-21 19:14:52 -07:00 committed by Ashwin Bharambe
parent 6d26bbdce3
commit 40ba22f4c8
15 changed files with 93 additions and 32 deletions

@@ -70,11 +70,25 @@ def chat_completion_request_to_messages(
         and is_multimodal(model.core_model_id)
     ):
         # llama3.1 and llama3.2 multimodal models follow the same tool prompt format
-        return augment_messages_for_tools_llama_3_1(request)
+        messages = augment_messages_for_tools_llama_3_1(request)
     elif model.model_family == ModelFamily.llama3_2:
-        return augment_messages_for_tools_llama_3_2(request)
+        messages = augment_messages_for_tools_llama_3_2(request)
     else:
-        return request.messages
+        messages = request.messages
+
+    if fmt := request.response_format:
+        if fmt.type == ResponseFormatType.json:
+            messages.append(
+                UserMessage(
+                    content=f"Please respond in JSON format with the schema: {json.dumps(fmt.schema)}"
+                )
+            )
+        elif fmt.type == ResponseFormatType.grammar:
+            raise NotImplementedError("Grammar response format not supported yet")
+        else:
+            raise ValueError(f"Unknown response format {fmt.type}")
+
+    return messages
 
 
 def augment_messages_for_tools_llama_3_1(
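
For context, a rough sketch of what the new branch does when a JSON response format is set: it serializes the requested schema and appends one more user message asking the model to answer in that shape. The schema and message text below are illustrative only, and the request/format classes referenced in the comments are assumed from the diff rather than copied from the repository.

import json

# Illustrative schema; any JSON Schema attached to the request's response_format would work.
schema = {
    "type": "object",
    "properties": {"name": {"type": "string"}, "year": {"type": "integer"}},
    "required": ["name", "year"],
}

# Mirrors the instruction string appended in the diff above.
instruction = f"Please respond in JSON format with the schema: {json.dumps(schema)}"

# Conceptually, the adapter now does roughly:
#   messages = [...existing chat messages...]
#   messages.append(UserMessage(content=instruction))
# so the schema request is the last turn the model sees before generating.
print(instruction)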