forked from phoenix-oss/llama-stack-mirror
Updates to prompt for tool calls (#29)
* update system prompts to drop new line
* Add tool prompt formats
* support json format
* JSON in caps
* function_tag system prompt is also added as a user message
* added docstrings for ToolPromptFormat

---------

Co-authored-by: Hardik Shah <hjshah@fb.com>
parent 0d933ac4c5
commit b8fc4d4dee
8 changed files with 173 additions and 30 deletions
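The commit message above references a ToolPromptFormat datatype with at least json and function_tag variants. As a rough sketch only (the real definition lives in llama_toolchain.agentic_system.api.datatypes and may differ in detail), it could look like this:

    from enum import Enum

    class ToolPromptFormat(Enum):
        """Sketch only: how custom tool definitions are rendered into the prompt.

        json         -- tools are described as JSON in the system prompt
                        (the default used in the diff below).
        function_tag -- tools are described in a function-tag style; per the
                        commit message, this prompt is also added as a user message.
        """

        json = "json"
        function_tag = "function_tag"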
@@ -15,6 +15,7 @@ from llama_toolchain.agentic_system.api import (
     AgenticSystemSessionCreateRequest,
     AgenticSystemToolDefinition,
 )
+from llama_toolchain.agentic_system.api.datatypes import ToolPromptFormat
 from llama_toolchain.agentic_system.client import AgenticSystemClient

 from llama_toolchain.agentic_system.tools.custom.execute import (
@@ -64,6 +65,7 @@ async def get_agent_system_instance(
     custom_tools: Optional[List[Any]] = None,
     disable_safety: bool = False,
     model: str = "Meta-Llama3.1-8B-Instruct",
+    tool_prompt_format: ToolPromptFormat = ToolPromptFormat.json,
 ) -> AgenticSystemClientWrapper:
     custom_tools = custom_tools or []

@@ -113,6 +115,7 @@ async def get_agent_system_instance(
                 ]
             ),
             sampling_params=SamplingParams(),
+            tool_prompt_format=tool_prompt_format,
         ),
     )
     create_response = await api.create_agentic_system(create_request)
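For callers of get_agent_system_instance, the new parameter can be overridden per instance. A minimal usage sketch, assuming the helper shown in the diff is importable (the import path below is a placeholder, and any connection arguments not visible in the hunk are omitted):

    import asyncio

    from llama_toolchain.agentic_system.api.datatypes import ToolPromptFormat
    # Placeholder import path; adjust to wherever get_agent_system_instance lives.
    from llama_toolchain.agentic_system.utils import get_agent_system_instance

    async def main() -> None:
        # tool_prompt_format is new in this commit; it defaults to ToolPromptFormat.json.
        client = await get_agent_system_instance(
            # Any required connection arguments (not shown in the hunk above) go here.
            model="Meta-Llama3.1-8B-Instruct",
            tool_prompt_format=ToolPromptFormat.function_tag,
        )
        print(client)

    if __name__ == "__main__":
        asyncio.run(main())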