feat: convert typehints from client_tool to litellm format (#1565)

Summary:
supports
https://github.com/meta-llama/llama-stack-client-python/pull/193

Test Plan:
LLAMA_STACK_CONFIG=fireworks pytest -s -v
tests/integration/agents/test_agents.py --safety-shield
meta-llama/Llama-Guard-3-8B --text-model
meta-llama/Llama-3.1-8B-Instruct
This commit is contained in:
ehhuang 2025-03-11 20:02:11 -07:00 committed by GitHub
parent 2370e826bc
commit 59dddafd12
No known key found for this signature in database
GPG key ID: B5690EEEBB952194

View file

@@ -615,6 +615,14 @@ def convert_tool_call(
     return valid_tool_call
# Translation table from Python builtin type names (as produced by client-tool
# typehints) to the JSON-Schema type names that litellm/OpenAI tool definitions
# expect. Lookups elsewhere use .get(name, name), so any name not listed here
# passes through unchanged.
PYTHON_TYPE_TO_LITELLM_TYPE = dict(
    int="integer",
    float="number",
    bool="boolean",
    str="string",
)
def convert_tooldef_to_openai_tool(tool: ToolDefinition) -> dict:
    """
    Convert a ToolDefinition to an OpenAI API-compatible dictionary.
@@ -675,7 +683,7 @@ def convert_tooldef_to_openai_tool(tool: ToolDefinition) -> dict:
     properties = parameters["properties"]
     required = []
     for param_name, param in tool.parameters.items():
-        properties[param_name] = {"type": param.param_type}
+        properties[param_name] = {"type": PYTHON_TYPE_TO_LITELLM_TYPE.get(param.param_type, param.param_type)}
         if param.description:
             properties[param_name].update(description=param.description)
         if param.default: