feat(tools): use { input_schema, output_schema } for ToolDefinition

Ashwin Bharambe 2025-09-30 19:13:15 -07:00
parent 42414a1a1b
commit 139320e19f
20 changed files with 1989 additions and 386 deletions
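The change replaces ToolDefinition's per-parameter map of ToolParamDefinition objects with a single JSON Schema dict passed as input_schema (the title also names an output_schema counterpart). A minimal before/after sketch, using a hypothetical get_weather tool and only the fields that appear in the hunks below:

# Before: one ToolParamDefinition per parameter (the pattern removed below)
ToolDefinition(
    tool_name="get_weather",                      # hypothetical tool
    description="Look up the current weather",
    parameters={
        "city": ToolParamDefinition(
            param_type="string",
            description="City to look up",
            required=True,
        ),
    },
)

# After: a single JSON Schema object under input_schema (the pattern added below)
ToolDefinition(
    tool_name="get_weather",
    description="Look up the current weather",
    input_schema={
        "type": "object",
        "properties": {
            "city": {"type": "string", "description": "City to look up"},
        },
        "required": ["city"],
    },
)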


@@ -54,7 +54,6 @@ from llama_stack.apis.inference import (
     StopReason,
     SystemMessage,
     ToolDefinition,
-    ToolParamDefinition,
     ToolResponse,
     ToolResponseMessage,
     UserMessage,
@@ -790,20 +789,38 @@ class ChatAgent(ShieldRunnerMixin):
         for tool_def in self.agent_config.client_tools:
             if tool_name_to_def.get(tool_def.name, None):
                 raise ValueError(f"Tool {tool_def.name} already exists")
+            # Build JSON Schema from tool parameters
+            properties = {}
+            required = []
+            for param in tool_def.parameters:
+                param_schema = {
+                    "type": param.parameter_type,
+                    "description": param.description,
+                }
+                if param.default is not None:
+                    param_schema["default"] = param.default
+                if param.items is not None:
+                    param_schema["items"] = param.items
+                if param.title is not None:
+                    param_schema["title"] = param.title
+                properties[param.name] = param_schema
+                if param.required:
+                    required.append(param.name)
+            input_schema = {
+                "type": "object",
+                "properties": properties,
+                "required": required,
+            }
             tool_name_to_def[tool_def.name] = ToolDefinition(
                 tool_name=tool_def.name,
                 description=tool_def.description,
-                parameters={
-                    param.name: ToolParamDefinition(
-                        param_type=param.parameter_type,
-                        description=param.description,
-                        required=param.required,
-                        items=param.items,
-                        title=param.title,
-                        default=param.default,
-                    )
-                    for param in tool_def.parameters
-                },
+                input_schema=input_schema,
             )
         for toolgroup_name_with_maybe_tool_name in agent_config_toolgroups:
             toolgroup_name, input_tool_name = self._parse_toolgroup_name(toolgroup_name_with_maybe_tool_name)
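For reference, the schema-building loop added above flattens a client tool's parameter list into one JSON Schema object. A self-contained sketch of what it produces, using SimpleNamespace stand-ins for the parameter objects (the parameter names and values are invented for illustration):

from types import SimpleNamespace

# Hypothetical client-tool parameters, shaped like the objects the loop above reads.
params = [
    SimpleNamespace(name="query", parameter_type="string", description="Search query",
                    required=True, default=None, items=None, title=None),
    SimpleNamespace(name="limit", parameter_type="integer", description="Max results",
                    required=False, default=10, items=None, title=None),
]

properties, required = {}, []
for param in params:
    schema = {"type": param.parameter_type, "description": param.description}
    if param.default is not None:
        schema["default"] = param.default
    if param.items is not None:
        schema["items"] = param.items
    if param.title is not None:
        schema["title"] = param.title
    properties[param.name] = schema
    if param.required:
        required.append(param.name)

input_schema = {"type": "object", "properties": properties, "required": required}
# input_schema == {
#     "type": "object",
#     "properties": {
#         "query": {"type": "string", "description": "Search query"},
#         "limit": {"type": "integer", "description": "Max results", "default": 10},
#     },
#     "required": ["query"],
# }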
@@ -835,20 +852,37 @@ class ChatAgent(ShieldRunnerMixin):
                 if tool_name_to_def.get(identifier, None):
                     raise ValueError(f"Tool {identifier} already exists")
                 if identifier:
+                    # Build JSON Schema from tool parameters
+                    properties = {}
+                    required = []
+                    for param in tool_def.parameters:
+                        param_schema = {
+                            "type": param.parameter_type,
+                            "description": param.description,
+                        }
+                        if param.default is not None:
+                            param_schema["default"] = param.default
+                        if param.items is not None:
+                            param_schema["items"] = param.items
+                        if param.title is not None:
+                            param_schema["title"] = param.title
+                        properties[param.name] = param_schema
+                        if param.required:
+                            required.append(param.name)
+                    input_schema = {
+                        "type": "object",
+                        "properties": properties,
+                        "required": required,
+                    }
                     tool_name_to_def[tool_def.identifier] = ToolDefinition(
                         tool_name=identifier,
                         description=tool_def.description,
-                        parameters={
-                            param.name: ToolParamDefinition(
-                                param_type=param.parameter_type,
-                                description=param.description,
-                                required=param.required,
-                                items=param.items,
-                                title=param.title,
-                                default=param.default,
-                            )
-                            for param in tool_def.parameters
-                        },
+                        input_schema=input_schema,
                     )
                     tool_name_to_args[tool_def.identifier] = toolgroup_to_args.get(toolgroup_name, {})
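The commit title also introduces output_schema, which none of the hunks in this file set. Assuming it takes a JSON Schema dict the same way input_schema does, a hypothetical tool that also declares its result shape might look like this (an assumption from the title, not code from this change):

# Hypothetical: output_schema is named in the commit title but not populated in these hunks.
ToolDefinition(
    tool_name="get_weather",
    description="Look up the current weather",
    input_schema={
        "type": "object",
        "properties": {"city": {"type": "string"}},
        "required": ["city"],
    },
    output_schema={
        "type": "object",
        "properties": {
            "temperature_c": {"type": "number"},
            "conditions": {"type": "string"},
        },
    },
)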


@@ -62,22 +62,38 @@ def convert_tooldef_to_chat_tool(tool_def):
         ChatCompletionToolParam suitable for OpenAI chat completion
     """
-    from llama_stack.models.llama.datatypes import ToolDefinition, ToolParamDefinition
+    from llama_stack.models.llama.datatypes import ToolDefinition
     from llama_stack.providers.utils.inference.openai_compat import convert_tooldef_to_openai_tool
+    # Build JSON Schema from tool parameters
+    properties = {}
+    required = []
+    for param in tool_def.parameters:
+        param_schema = {
+            "type": param.parameter_type,
+            "description": param.description,
+        }
+        if param.default is not None:
+            param_schema["default"] = param.default
+        if param.items is not None:
+            param_schema["items"] = param.items
+        properties[param.name] = param_schema
+        if param.required:
+            required.append(param.name)
+    input_schema = {
+        "type": "object",
+        "properties": properties,
+        "required": required,
+    }
     internal_tool_def = ToolDefinition(
         tool_name=tool_def.name,
         description=tool_def.description,
-        parameters={
-            param.name: ToolParamDefinition(
-                param_type=param.parameter_type,
-                description=param.description,
-                required=param.required,
-                default=param.default,
-                items=param.items,
-            )
-            for param in tool_def.parameters
-        },
+        input_schema=input_schema,
     )
     return convert_tooldef_to_openai_tool(internal_tool_def)
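convert_tooldef_to_openai_tool itself is not part of this diff; assuming it forwards input_schema as the function's parameters, the returned value should roughly match the standard OpenAI chat-completions function-tool shape:

# Rough expected shape only; the converter lives in
# llama_stack.providers.utils.inference.openai_compat and is not shown here.
{
    "type": "function",
    "function": {
        "name": "get_weather",                      # hypothetical tool name
        "description": "Look up the current weather",
        "parameters": {                             # assumed to be the input_schema built above
            "type": "object",
            "properties": {"city": {"type": "string"}},
            "required": ["city"],
        },
    },
}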
@@ -526,22 +542,37 @@ class StreamingResponseOrchestrator:
         from openai.types.chat import ChatCompletionToolParam
         from llama_stack.apis.tools import Tool
-        from llama_stack.models.llama.datatypes import ToolDefinition, ToolParamDefinition
+        from llama_stack.models.llama.datatypes import ToolDefinition
         from llama_stack.providers.utils.inference.openai_compat import convert_tooldef_to_openai_tool
         def make_openai_tool(tool_name: str, tool: Tool) -> ChatCompletionToolParam:
+            # Build JSON Schema from tool parameters
+            properties = {}
+            required = []
+            for param in tool.parameters:
+                param_schema = {
+                    "type": param.parameter_type,
+                    "description": param.description,
+                }
+                if param.default is not None:
+                    param_schema["default"] = param.default
+                properties[param.name] = param_schema
+                if param.required:
+                    required.append(param.name)
+            input_schema = {
+                "type": "object",
+                "properties": properties,
+                "required": required,
+            }
             tool_def = ToolDefinition(
                 tool_name=tool_name,
                 description=tool.description,
-                parameters={
-                    param.name: ToolParamDefinition(
-                        param_type=param.parameter_type,
-                        description=param.description,
-                        required=param.required,
-                        default=param.default,
-                    )
-                    for param in tool.parameters
-                },
+                input_schema=input_schema,
             )
             return convert_tooldef_to_openai_tool(tool_def)
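make_openai_tool only reads description and parameters off the Tool (with name, parameter_type, description, required, and default on each parameter), so a lightweight stand-in is enough to sketch the kind of input it expects; the names below are invented:

from types import SimpleNamespace

# Stand-in for a Tool from llama_stack.apis.tools; only the fields make_openai_tool reads.
fetch_tool = SimpleNamespace(
    description="Fetch a web page",
    parameters=[
        SimpleNamespace(name="url", parameter_type="string",
                        description="Page URL", required=True, default=None),
    ],
)

# Inside the orchestrator method above, this would yield an OpenAI function tool whose
# parameters are {"type": "object", "properties": {"url": ...}, "required": ["url"]}.
# chat_tool = make_openai_tool("fetch_page", fetch_tool)   # hypothetical tool name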