fix: pass tool_prompt_format to chat_formatter (#1198)

Summary:

The request's tool_config.tool_prompt_format was never passed through to the chat formatter, so completion messages containing tool_calls were not encoded correctly. Pass it to encode_dialog_prompt in both prompt-adapter helpers. See the added unittest.

Test Plan:

python -m unittest llama_stack.providers.tests.inference.test_prompt_adapter
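
For reference, a minimal sketch of the kind of check the new unittest performs. The import paths, type names, and field values below are assumptions inferred from the diff, not copied from the committed test:

# Hypothetical sketch of the added test, not the committed code: it builds a
# request whose tool_config carries an explicit tool_prompt_format and checks
# that the prompt-adapter helper still renders a prompt end to end. Before
# this fix, encode_dialog_prompt never received tool_config.tool_prompt_format.
import unittest

# Assumed import paths; the real test lives in the llama_stack test suite.
from llama_stack.apis.inference import ChatCompletionRequest, ToolConfig, UserMessage
from llama_stack.providers.utils.inference.prompt_adapter import (
    chat_completion_request_to_prompt,
)

MODEL = "Llama3.1-8B-Instruct"  # placeholder model id


class ToolPromptFormatTest(unittest.IsolatedAsyncioTestCase):
    async def test_tool_prompt_format_is_forwarded(self):
        request = ChatCompletionRequest(
            model=MODEL,
            messages=[UserMessage(content="What is the weather in SF?")],
            tool_config=ToolConfig(tool_prompt_format="json"),
        )
        prompt = await chat_completion_request_to_prompt(request, MODEL)
        self.assertIsInstance(prompt, str)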
Author: ehhuang (committed via GitHub)
Date:   2025-02-20 21:38:35 -08:00
Commit: cfa752fc92 (parent: 33a64eb5ec)

2 changed files with 50 additions and 2 deletions


@@ -252,7 +252,9 @@ async def chat_completion_request_to_prompt(request: ChatCompletionRequest, llam
     request = await convert_request_to_raw(request)
     formatter = ChatFormat(tokenizer=Tokenizer.get_instance())
-    model_input = formatter.encode_dialog_prompt(request.messages)
+    model_input = formatter.encode_dialog_prompt(
+        request.messages, tool_prompt_format=request.tool_config.tool_prompt_format
+    )
     return formatter.tokenizer.decode(model_input.tokens)
@@ -264,7 +266,9 @@ async def chat_completion_request_to_model_input_info(
     request = await convert_request_to_raw(request)
     formatter = ChatFormat(tokenizer=Tokenizer.get_instance())
-    model_input = formatter.encode_dialog_prompt(request.messages)
+    model_input = formatter.encode_dialog_prompt(
+        request.messages, tool_prompt_format=request.tool_config.tool_prompt_format
+    )
     return (
         formatter.tokenizer.decode(model_input.tokens),
         len(model_input.tokens),
     )