pre-commit

Ashwin Bharambe 2025-10-02 14:57:58 -07:00
parent 31f211f0ad
commit be6819fa92
8 changed files with 14 additions and 34 deletions

View file

@@ -7,7 +7,7 @@
 from enum import Enum
 from typing import Any, Literal, Protocol
-from pydantic import BaseModel, Field
+from pydantic import BaseModel
 from typing_extensions import runtime_checkable
 from llama_stack.apis.common.content_types import URL, InterleavedContent

View file

@@ -11,6 +11,7 @@
 # top-level folder for each specific model found within the models/ directory at
 # the top-level of this source tree.
+import json
 import textwrap
 from llama_stack.models.llama.datatypes import (
@@ -184,7 +185,7 @@ def usecases() -> list[UseCase | str]:
                     ToolCall(
                         call_id="tool_call_id",
                         tool_name=BuiltinTool.wolfram_alpha,
-                        arguments={"query": "100th decimal of pi"},
+                        arguments=json.dumps({"query": "100th decimal of pi"}),
                     )
                 ],
             ),

View file

@@ -11,6 +11,7 @@
 # top-level folder for each specific model found within the models/ directory at
 # the top-level of this source tree.
+import json
 import textwrap
 from llama_stack.models.llama.datatypes import (
@@ -185,7 +186,7 @@ def usecases() -> list[UseCase | str]:
                     ToolCall(
                         call_id="tool_call_id",
                         tool_name=BuiltinTool.wolfram_alpha,
-                        arguments={"query": "100th decimal of pi"},
+                        arguments=json.dumps({"query": "100th decimal of pi"}),
                     )
                 ],
             ),

View file

@@ -1171,7 +1171,6 @@ async def convert_openai_chat_completion_stream(
                 )
                 try:
-                    arguments = json.loads(buffer["arguments"])
                     tool_call = ToolCall(
                         call_id=buffer["call_id"],
                         tool_name=buffer["name"],
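
Read together with the prompt-format hunks above, this change implies that ToolCall.arguments now carries the JSON-encoded string itself rather than a pre-decoded dict, so the stream converter forwards the buffered text as-is. Below is a minimal sketch of that convention, not code from this commit; it assumes arguments is now typed as a plain string on ToolCall and that callers decode it themselves when they need structured values.

import json

from llama_stack.models.llama.datatypes import BuiltinTool, ToolCall

# Producer side (mirrors the prompt-format examples): serialize the
# arguments to a JSON string before constructing the ToolCall.
call = ToolCall(
    call_id="tool_call_id",
    tool_name=BuiltinTool.wolfram_alpha,
    arguments=json.dumps({"query": "100th decimal of pi"}),
)

# Consumer side (assumed): decode on demand instead of relying on the
# stream converter to have run json.loads already.
parsed = json.loads(call.arguments)
assert parsed["query"] == "100th decimal of pi"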

View file

@@ -139,12 +139,7 @@ class ToolGroupsImpl(Impl):
                 description="Test tool",
                 input_schema={
                     "type": "object",
-                    "properties": {
-                        "test-param": {
-                            "type": "string",
-                            "description": "Test param"
-                        }
-                    }
+                    "properties": {"test-param": {"type": "string", "description": "Test param"}},
                 },
             )
         ]

View file

@@ -16,7 +16,6 @@ from llama_stack.apis.agents import (
 )
 from llama_stack.apis.common.responses import PaginatedResponse
 from llama_stack.apis.inference import Inference
-from llama_stack.apis.resource import ResourceType
 from llama_stack.apis.safety import Safety
 from llama_stack.apis.tools import ListToolDefsResponse, ToolDef, ToolGroups, ToolRuntime
 from llama_stack.apis.vector_io import VectorIO
@@ -241,20 +240,16 @@ async def test__initialize_tools(agents_impl, sample_agent_config):
             input_schema={
                 "type": "object",
                 "properties": {
-                    "story_title": {
-                        "type": "string",
-                        "description": "Title of the story",
-                        "title": "Story Title"
-                    },
+                    "story_title": {"type": "string", "description": "Title of the story", "title": "Story Title"},
                     "input_words": {
                         "type": "array",
                         "description": "Input words",
                         "items": {"type": "string"},
                         "title": "Input Words",
-                        "default": []
-                    }
+                        "default": [],
+                    },
                 },
-                "required": ["story_title"]
+                "required": ["story_title"],
             },
         )
     ]

View file

@@ -192,13 +192,8 @@ async def test_create_openai_response_with_string_input_with_tools(openai_respon
             description="Search the web for information",
             input_schema={
                 "type": "object",
-                "properties": {
-                    "query": {
-                        "type": "string",
-                        "description": "The query to search for"
-                    }
-                },
-                "required": ["query"]
+                "properties": {"query": {"type": "string", "description": "The query to search for"}},
+                "required": ["query"],
             },
         )

View file

@@ -22,14 +22,8 @@ def test_convert_tooldef_to_chat_tool_preserves_items_field():
         description="A test tool with array parameter",
         input_schema={
             "type": "object",
-            "properties": {
-                "tags": {
-                    "type": "array",
-                    "description": "List of tags",
-                    "items": {"type": "string"}
-                }
-            },
-            "required": ["tags"]
+            "properties": {"tags": {"type": "array", "description": "List of tags", "items": {"type": "string"}}},
+            "required": ["tags"],
         },
     )