chore(api): add mypy coverage to chat_format (#2654)

# What does this PR do?
<!-- Provide a short summary of what this PR does and why. Link to
relevant issues if applicable. -->
This PR adds static type coverage to `llama-stack` by enabling mypy checks for `llama_stack/models/llama/llama3/chat_format.py`.

Part of https://github.com/meta-llama/llama-stack/issues/2647

<!-- If resolving an issue, uncomment and update the line below -->
<!-- Closes #[issue-number] -->

## Test Plan
<!-- Describe the tests you ran to verify your changes with result
summaries. *Provide clear instructions so the plan can be easily
re-executed.* -->

Signed-off-by: Mustafa Elbehery <melbeher@redhat.com>

**`llama_stack/models/llama/llama3/chat_format.py`**

```diff
@@ -8,6 +8,7 @@ import io
 import json
 import uuid
 from dataclasses import dataclass
+from typing import Any

 from PIL import Image as PIL_Image
@@ -184,16 +185,26 @@ class ChatFormat:
             content = content[: -len("<|eom_id|>")]
             stop_reason = StopReason.end_of_message

-        tool_name = None
-        tool_arguments = {}
+        tool_name: str | BuiltinTool | None = None
+        tool_arguments: dict[str, Any] = {}

         custom_tool_info = ToolUtils.maybe_extract_custom_tool_call(content)
         if custom_tool_info is not None:
-            tool_name, tool_arguments = custom_tool_info
+            # Type guard: ensure custom_tool_info is a tuple of correct types
+            if isinstance(custom_tool_info, tuple) and len(custom_tool_info) == 2:
+                extracted_tool_name, extracted_tool_arguments = custom_tool_info
+                # Handle both dict and str return types from the function
+                if isinstance(extracted_tool_arguments, dict):
+                    tool_name, tool_arguments = extracted_tool_name, extracted_tool_arguments
+                else:
+                    # If it's a string, treat it as a query parameter
+                    tool_name, tool_arguments = extracted_tool_name, {"query": extracted_tool_arguments}
+            else:
+                tool_name, tool_arguments = None, {}
             # Sometimes when agent has custom tools alongside builin tools
             # Agent responds for builtin tool calls in the format of the custom tools
             # This code tries to handle that case
-            if tool_name in BuiltinTool.__members__:
+            if tool_name is not None and tool_name in BuiltinTool.__members__:
                 tool_name = BuiltinTool[tool_name]
                 if isinstance(tool_arguments, dict):
                     tool_arguments = {
```
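
For context on the new branch: the narrowing exists because the extraction helper can hand back tool arguments either as a parsed dict or as a raw string, and mypy needs `isinstance` checks to keep `tool_arguments` typed as `dict[str, Any]`. Below is a minimal, self-contained sketch of the same pattern; the helper is hypothetical and only mimics the relevant return shape of `ToolUtils.maybe_extract_custom_tool_call`:

```python
from typing import Any


def maybe_extract_call(content: str) -> tuple[str, dict[str, Any] | str] | None:
    """Hypothetical stand-in for ToolUtils.maybe_extract_custom_tool_call."""
    if content.startswith("{"):
        return "get_weather", {"city": "Berlin"}  # arguments already parsed
    if content.startswith("<function="):
        return "knowledge_search", "llama stack docs"  # arguments as raw string
    return None


tool_name: str | None = None
tool_arguments: dict[str, Any] = {}

info = maybe_extract_call("<function=knowledge_search>llama stack docs</function>")
if info is not None:
    name, args = info
    # Without this narrowing, mypy rejects assigning `args` to
    # `tool_arguments`: `str` is not compatible with `dict[str, Any]`.
    if isinstance(args, dict):
        tool_name, tool_arguments = name, args
    else:
        tool_name, tool_arguments = name, {"query": args}

print(tool_name, tool_arguments)
# knowledge_search {'query': 'llama stack docs'}
```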

**`pyproject.toml`**

```diff
@@ -242,7 +242,6 @@ exclude = [
     "^llama_stack/distribution/store/registry\\.py$",
     "^llama_stack/distribution/utils/exec\\.py$",
     "^llama_stack/distribution/utils/prompt_for_config\\.py$",
-    "^llama_stack/models/llama/llama3/chat_format\\.py$",
     "^llama_stack/models/llama/llama3/interface\\.py$",
     "^llama_stack/models/llama/llama3/tokenizer\\.py$",
     "^llama_stack/models/llama/llama3/tool_utils\\.py$",
```