forked from phoenix-oss/llama-stack-mirror
Updates to prompt for tool calls (#29)
* update system prompts to drop new line
* Add tool prompt formats
* support json format
* JSON in caps
* function_tag system prompt is also added as a user message
* added docstrings for ToolPromptFormat

---------

Co-authored-by: Hardik Shah <hjshah@fb.com>
This commit is contained in:
parent 0d933ac4c5
commit b8fc4d4dee
8 changed files with 173 additions and 30 deletions
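Before the file-by-file changes, here is a minimal sketch of how the changed entry point is now meant to be called. The weather tool and the exact ToolDefinition constructor fields are illustrative assumptions; only the three keyword arguments and the two ToolPromptFormat members come from this commit, and get_agentic_prefix_messages is the function modified in the diff below.

# Sketch only: ToolDefinition's constructor fields are assumptions,
# not taken from this commit; the call's keyword arguments are.
from llama_toolchain.agentic_system.api.datatypes import ToolPromptFormat
from llama_toolchain.inference.api import ToolDefinition

weather_tool = ToolDefinition(
    tool_name="get_weather",                    # hypothetical custom tool
    description="Get the current weather for a city",
    parameters={},                              # parameter schema omitted
)

prefix = get_agentic_prefix_messages(           # the function changed below
    builtin_tools=[],
    custom_tools=[weather_tool],
    tool_prompt_format=ToolPromptFormat.json,   # or ToolPromptFormat.function_tag
)
# prefix[0] is the SystemMessage; the custom-tool prompt arrives as a UserMessage.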
@@ -5,21 +5,27 @@
 # the root directory of this source tree.
 
 import json
+import textwrap
 from datetime import datetime
 from typing import List
 
+from llama_toolchain.agentic_system.api.datatypes import ToolPromptFormat
+
 from llama_toolchain.inference.api import (
     BuiltinTool,
     Message,
     SystemMessage,
     ToolDefinition,
+    UserMessage,
 )
 
 from .tools.builtin import SingleMessageBuiltinTool
 
 
 def get_agentic_prefix_messages(
-    builtin_tools: List[SingleMessageBuiltinTool], custom_tools: List[ToolDefinition]
+    builtin_tools: List[SingleMessageBuiltinTool],
+    custom_tools: List[ToolDefinition],
+    tool_prompt_format: ToolPromptFormat,
 ) -> List[Message]:
     messages = []
     content = ""
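The hunk above imports ToolPromptFormat from the agentic_system API datatypes, but its definition is not part of the captured diff. Judging from the two members referenced later in this file, it is presumably an enum along these lines (a sketch, not the actual declaration):

from enum import Enum

class ToolPromptFormat(Enum):
    # Only these two members are certain from this diff; the values are assumed.
    json = "json"
    function_tag = "function_tag"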
@@ -34,28 +40,52 @@ def get_agentic_prefix_messages(
         ]
     )
     if tool_str:
-        content += f"Tools: {tool_str}\n"
+        content += f"Tools: {tool_str}"
 
     current_date = datetime.now()
     formatted_date = current_date.strftime("%d %B %Y")
     date_str = f"""
 Cutting Knowledge Date: December 2023
-Today Date: {formatted_date}\n\n"""
+Today Date: {formatted_date}\n"""
     content += date_str
+    messages.append(SystemMessage(content=content))
 
     if custom_tools:
-        custom_message = get_system_prompt_for_custom_tools(custom_tools)
-        content += custom_message
+        if tool_prompt_format == ToolPromptFormat.function_tag:
+            text = prompt_for_function_tag(custom_tools)
+            messages.append(UserMessage(content=text))
+        elif tool_prompt_format == ToolPromptFormat.json:
+            text = prompt_for_json(custom_tools)
+            messages.append(UserMessage(content=text))
+        else:
+            raise NotImplementedError(
+                f"Tool prompt format {tool_prompt_format} is not supported"
+            )
+    else:
+        messages.append(SystemMessage(content=content))
 
-    # TODO: Replace this hard coded message with instructions coming in the request
-    if False:
-        content += "You are a helpful Assistant."
-
-    messages.append(SystemMessage(content=content))
     return messages
 
 
-def get_system_prompt_for_custom_tools(custom_tools: List[ToolDefinition]) -> str:
+def prompt_for_json(custom_tools: List[ToolDefinition]) -> str:
+    tool_defs = "\n".join(
+        translate_custom_tool_definition_to_json(t) for t in custom_tools
+    )
+    content = textwrap.dedent(
+        """
+        Answer the user's question by making use of the following functions if needed.
+        If none of the function can be used, please say so.
+        Here is a list of functions in JSON format:
+        {tool_defs}
+
+        Return function calls in JSON format.
+        """
+    )
+    content = content.lstrip("\n").format(tool_defs=tool_defs)
+    return content
+
+
+def prompt_for_function_tag(custom_tools: List[ToolDefinition]) -> str:
     custom_tool_params = ""
     for t in custom_tools:
         custom_tool_params += get_instruction_string(t) + "\n"
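A small point worth noting in the new prompt_for_json: the triple-quoted template begins with a newline, and textwrap.dedent only strips the common indentation, so the leading blank line survives. That is why the code chains .lstrip("\n") before .format(...). A self-contained sketch of the same pattern (the sample tool_defs value is illustrative):

import textwrap

template = textwrap.dedent(
    """
    Here is a list of functions in JSON format:
    {tool_defs}

    Return function calls in JSON format.
    """
)
# dedent removes the shared indentation but keeps the leading "\n",
# so lstrip is needed to avoid a blank first line in the prompt.
rendered = template.lstrip("\n").format(tool_defs='{"name": "get_weather"}')
print(rendered)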
@@ -76,7 +106,6 @@ Reminder:
 - Required parameters MUST be specified
 - Only call one function at a time
 - Put the entire function call reply on one line
-
 """
     return content
 
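The reminder lines above are part of the instruction text that the renamed prompt_for_function_tag builds. The "one line" rule refers to replies in the function-tag convention used for Llama-style custom tool calls; the exact surrounding wording is outside this hunk. An illustrative reply, not taken from this diff:

expected_reply = '<function=get_weather>{"city": "Tokyo"}</function>'  # hypothetical tool and arguments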
@@ -98,7 +127,6 @@ def get_parameters_string(custom_tool_definition) -> str:
     )
 
 
-# NOTE: Unused right now
 def translate_custom_tool_definition_to_json(tool_def):
     """Translates ToolDefinition to json as expected by model
     eg. output for a function
@@ -149,4 +177,4 @@ def translate_custom_tool_definition_to_json(tool_def):
     else:
         func_def["function"]["parameters"] = {}
 
-    return json.dumps(func_def)
+    return json.dumps(func_def, indent=4)
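The final change only affects how each generated tool definition is serialized into the prompt: json.dumps(func_def, indent=4) pretty-prints the schema over multiple lines instead of emitting it as a single line. The exact shape of func_def is only partly visible in this capture, so the dict below is illustrative:

import json

func_def = {"function": {"name": "get_weather", "parameters": {}}}  # illustrative shape
print(json.dumps(func_def))             # single line: {"function": {"name": ...}}
print(json.dumps(func_def, indent=4))   # same data, spread over indented lines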