diff --git a/docs/_static/llama-stack-spec.html b/docs/_static/llama-stack-spec.html
index aa43333be..7afa788bc 100644
--- a/docs/_static/llama-stack-spec.html
+++ b/docs/_static/llama-stack-spec.html
@@ -8993,9 +8993,163 @@
"title": "OpenAIResponsesTool"
},
"OpenAIResponsesToolChoice": {
+ "oneOf": [
+ {
+ "$ref": "#/components/schemas/ToolChoiceOptions"
+ },
+ {
+ "$ref": "#/components/schemas/ToolChoiceTypes"
+ },
+ {
+ "oneOf": [
+ {
+ "$ref": "#/components/schemas/ToolChoiceAllowed"
+ },
+ {
+ "$ref": "#/components/schemas/ToolChoiceFunction"
+ },
+ {
+ "$ref": "#/components/schemas/ToolChoiceMcp"
+ },
+ {
+ "$ref": "#/components/schemas/ToolChoiceCustom"
+ }
+ ],
+ "discriminator": {
+ "propertyName": "type",
+ "mapping": {
+ "allowed_tools": "#/components/schemas/ToolChoiceAllowed",
+ "function": "#/components/schemas/ToolChoiceFunction",
+ "mcp": "#/components/schemas/ToolChoiceMcp",
+ "custom": "#/components/schemas/ToolChoiceCustom"
+ }
+ }
+ }
+ ]
+ },
+ "ToolChoiceAllowed": {
"type": "object",
- "title": "OpenAIResponsesToolChoice",
- "description": "Type alias.\nType aliases are created through the type statement::\n\n type Alias = int\n\nIn this example, Alias and int will be treated equivalently by static\ntype checkers.\n\nAt runtime, Alias is an instance of TypeAliasType. The __name__\nattribute holds the name of the type alias. The value of the type alias\nis stored in the __value__ attribute. It is evaluated lazily, so the\nvalue is computed only if the attribute is accessed.\n\nType aliases can also be generic::\n\n type ListOrSet[T] = list[T] | set[T]\n\nIn this case, the type parameters of the alias are stored in the\n__type_params__ attribute.\n\nSee PEP 695 for more information."
+ "properties": {
+ "mode": {
+ "type": "string",
+ "enum": [
+ "auto",
+ "required"
+ ]
+ },
+ "tools": {
+ "type": "array",
+ "items": {
+ "type": "object",
+ "additionalProperties": {
+ "type": "object",
+ "title": "object",
+ "description": "The base class of the class hierarchy.\nWhen called, it accepts no arguments and returns a new featureless\ninstance that has no instance attributes and cannot be given any."
+ }
+ }
+ },
+ "type": {
+ "type": "string",
+ "const": "allowed_tools",
+ "default": "allowed_tools"
+ }
+ },
+ "additionalProperties": false,
+ "required": [
+ "mode",
+ "tools",
+ "type"
+ ],
+ "title": "ToolChoiceAllowed"
+ },
+ "ToolChoiceCustom": {
+ "type": "object",
+ "properties": {
+ "name": {
+ "type": "string"
+ },
+ "type": {
+ "type": "string",
+ "const": "custom",
+ "default": "custom"
+ }
+ },
+ "additionalProperties": false,
+ "required": [
+ "name",
+ "type"
+ ],
+ "title": "ToolChoiceCustom"
+ },
+ "ToolChoiceFunction": {
+ "type": "object",
+ "properties": {
+ "name": {
+ "type": "string"
+ },
+ "type": {
+ "type": "string",
+ "const": "function",
+ "default": "function"
+ }
+ },
+ "additionalProperties": false,
+ "required": [
+ "name",
+ "type"
+ ],
+ "title": "ToolChoiceFunction"
+ },
+ "ToolChoiceMcp": {
+ "type": "object",
+ "properties": {
+ "server_label": {
+ "type": "string"
+ },
+ "type": {
+ "type": "string",
+ "const": "mcp",
+ "default": "mcp"
+ },
+ "name": {
+ "type": "string"
+ }
+ },
+ "additionalProperties": false,
+ "required": [
+ "server_label",
+ "type"
+ ],
+ "title": "ToolChoiceMcp"
+ },
+ "ToolChoiceOptions": {
+ "type": "string",
+ "enum": [
+ "none",
+ "auto",
+ "required"
+ ]
+ },
+ "ToolChoiceTypes": {
+ "type": "object",
+ "properties": {
+ "type": {
+ "type": "string",
+ "enum": [
+ "file_search",
+ "web_search_preview",
+ "computer_use_preview",
+ "web_search_preview_2025_03_11",
+ "image_generation",
+ "code_interpreter"
+ ]
+ }
+ },
+ "additionalProperties": false,
+ "required": [
+ "type"
+ ],
+ "title": "ToolChoiceTypes"
},
"OpenAIResponseContentPart": {
"oneOf": [
diff --git a/docs/_static/llama-stack-spec.yaml b/docs/_static/llama-stack-spec.yaml
index 9b89c34a5..06d420126 100644
--- a/docs/_static/llama-stack-spec.yaml
+++ b/docs/_static/llama-stack-spec.yaml
@@ -6589,39 +6589,118 @@ components:
additionalProperties: false
title: OpenAIResponsesTool
OpenAIResponsesToolChoice:
+ oneOf:
+ - $ref: '#/components/schemas/ToolChoiceOptions'
+ - $ref: '#/components/schemas/ToolChoiceTypes'
+ - oneOf:
+ - $ref: '#/components/schemas/ToolChoiceAllowed'
+ - $ref: '#/components/schemas/ToolChoiceFunction'
+ - $ref: '#/components/schemas/ToolChoiceMcp'
+ - $ref: '#/components/schemas/ToolChoiceCustom'
+ discriminator:
+ propertyName: type
+ mapping:
+ allowed_tools: '#/components/schemas/ToolChoiceAllowed'
+ function: '#/components/schemas/ToolChoiceFunction'
+ mcp: '#/components/schemas/ToolChoiceMcp'
+ custom: '#/components/schemas/ToolChoiceCustom'
+ ToolChoiceAllowed:
type: object
- title: OpenAIResponsesToolChoice
- description: >-
- Type alias.
+ properties:
+ mode:
+ type: string
+ enum:
+ - auto
+ - required
+ tools:
+ type: array
+ items:
+ type: object
+ additionalProperties:
+ type: object
+ title: object
+ description: >-
+ The base class of the class hierarchy.
+                When called, it accepts no arguments and returns a new featureless
+                instance that has no instance attributes and cannot be given any.
-      Type aliases are created through the type statement::
-          type Alias = int
-
-      In this example, Alias and int will be treated equivalently by static
-
-      type checkers.
-
-
-      At runtime, Alias is an instance of TypeAliasType. The __name__
-
-      attribute holds the name of the type alias. The value of the type alias
-
-      is stored in the __value__ attribute. It is evaluated lazily, so the
-
-      value is computed only if the attribute is accessed.
-
-
-      Type aliases can also be generic::
-
-          type ListOrSet[T] = list[T] | set[T]
-
-      In this case, the type parameters of the alias are stored in the
-
-      __type_params__ attribute.
-
-
-      See PEP 695 for more information.
+ type:
+ type: string
+ const: allowed_tools
+ default: allowed_tools
+ additionalProperties: false
+ required:
+ - mode
+ - tools
+ - type
+ title: ToolChoiceAllowed
+ ToolChoiceCustom:
+ type: object
+ properties:
+ name:
+ type: string
+ type:
+ type: string
+ const: custom
+ default: custom
+ additionalProperties: false
+ required:
+ - name
+ - type
+ title: ToolChoiceCustom
+ ToolChoiceFunction:
+ type: object
+ properties:
+ name:
+ type: string
+ type:
+ type: string
+ const: function
+ default: function
+ additionalProperties: false
+ required:
+ - name
+ - type
+ title: ToolChoiceFunction
+ ToolChoiceMcp:
+ type: object
+ properties:
+ server_label:
+ type: string
+ type:
+ type: string
+ const: mcp
+ default: mcp
+ name:
+ type: string
+ additionalProperties: false
+ required:
+ - server_label
+ - type
+ title: ToolChoiceMcp
+ ToolChoiceOptions:
+ type: string
+ enum:
+ - none
+ - auto
+ - required
+ ToolChoiceTypes:
+ type: object
+ properties:
+ type:
+ type: string
+ enum:
+ - file_search
+ - web_search_preview
+ - computer_use_preview
+ - web_search_preview_2025_03_11
+ - image_generation
+ - code_interpreter
+ additionalProperties: false
+ required:
+ - type
+ title: ToolChoiceTypes
OpenAIResponseContentPart:
oneOf:
- $ref: '#/components/schemas/OpenAIResponseContentPartOutputText'
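
The HTML and YAML hunks above carry the same regenerated schema: `tool_choice` is now a `oneOf` over the plain options string, the hosted tool types, and a discriminated union of the object forms. A rough sketch of payloads that should match each branch, written as Python literals purely for illustration (the tool names and labels are made up):

```python
# Illustrative payloads only; each comment names the schema branch expected to match.
tool_choice_examples = [
    "auto",                                       # ToolChoiceOptions: "none" | "auto" | "required"
    {"type": "web_search_preview"},               # ToolChoiceTypes: hosted tool types
    {"type": "function", "name": "get_weather"},  # ToolChoiceFunction (hypothetical name)
    {"type": "mcp", "server_label": "docs"},      # ToolChoiceMcp (hypothetical label)
    {"type": "custom", "name": "my_tool"},        # ToolChoiceCustom (hypothetical name)
    {"type": "allowed_tools", "mode": "auto",     # ToolChoiceAllowed
     "tools": [{"type": "function", "name": "get_weather"}]},
]
```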
diff --git a/llama_stack/apis/agents/openai_responses.py b/llama_stack/apis/agents/openai_responses.py
index 1783349c3..604109a1f 100644
--- a/llama_stack/apis/agents/openai_responses.py
+++ b/llama_stack/apis/agents/openai_responses.py
@@ -4,7 +4,7 @@
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.
-from typing import Annotated, Any, Literal, Union
+from typing import Annotated, Any, Literal
from pydantic import BaseModel, Field
from typing_extensions import TypedDict
@@ -14,21 +14,20 @@ from llama_stack.apis.tools.openai_tool_choice import (
ToolChoiceCustom,
ToolChoiceFunction,
ToolChoiceMcp,
+ ToolChoiceOptions,
ToolChoiceTypes,
)
from llama_stack.apis.vector_io import SearchRankingOptions as FileSearchRankingOptions
from llama_stack.schema_utils import json_schema_type, register_schema
-type OpenAIResponsesToolChoice = Annotated[
- Union[
- ToolChoiceTypes,
- ToolChoiceAllowed,
- ToolChoiceFunction,
- ToolChoiceMcp,
- ToolChoiceCustom
- ],
- Field(discriminator="type"),
-]
+OpenAIResponsesToolChoice = (
+ ToolChoiceOptions
+ | ToolChoiceTypes # Multiple type values - can't use a discriminator here
+ | Annotated[
+ ToolChoiceAllowed | ToolChoiceFunction | ToolChoiceMcp | ToolChoiceCustom,
+ Field(discriminator="type"),
+ ]
+)
register_schema(OpenAIResponsesToolChoice, name="OpenAIResponsesToolChoice")
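
A rough sketch (not part of the PR) of how the reworked union validates, assuming pydantic v2 and the imports from the diff above; `get_weather` is a made-up function name:

```python
from pydantic import TypeAdapter

from llama_stack.apis.agents.openai_responses import OpenAIResponsesToolChoice

adapter = TypeAdapter(OpenAIResponsesToolChoice)

# Bare strings match ToolChoiceOptions.
assert adapter.validate_python("auto") == "auto"

# A unique "type" value routes through the discriminated branch.
fn = adapter.validate_python({"type": "function", "name": "get_weather"})
assert fn.type == "function"

# Hosted tool types share one model with several possible "type" values,
# which is why ToolChoiceTypes sits outside the discriminated union.
hosted = adapter.validate_python({"type": "web_search_preview"})
assert hosted.type == "web_search_preview"
```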
diff --git a/llama_stack/apis/tools/openai_tool_choice.py b/llama_stack/apis/tools/openai_tool_choice.py
index c7ab4a417..d5279be62 100644
--- a/llama_stack/apis/tools/openai_tool_choice.py
+++ b/llama_stack/apis/tools/openai_tool_choice.py
@@ -10,7 +10,7 @@ from pydantic import BaseModel
from llama_stack.schema_utils import json_schema_type, register_schema
-type ToolChoiceOptions = Literal["none", "auto", "required"]
+ToolChoiceOptions = Literal["none", "auto", "required"]
register_schema(ToolChoiceOptions, name="ToolChoiceOptions")
@@ -24,7 +24,7 @@ class ToolChoiceTypes(BaseModel):
"image_generation",
"code_interpreter",
]
- """The type of hosted tool the model should to use.
+ """The type of hosted tool the model should use.
Allowed values are:
@@ -61,7 +61,7 @@ class ToolChoiceAllowed(BaseModel):
```
"""
- type: Literal["allowed_tools"]
+ type: Literal["allowed_tools"] = "allowed_tools"
"""Allowed tool configuration type. Always `allowed_tools`."""
@@ -70,7 +70,7 @@ class ToolChoiceFunction(BaseModel):
name: str
"""The name of the function to call."""
- type: Literal["function"]
+ type: Literal["function"] = "function"
"""For function calling, the type is always `function`."""
@@ -79,7 +79,7 @@ class ToolChoiceMcp(BaseModel):
server_label: str
"""The label of the MCP server to use."""
- type: Literal["mcp"]
+ type: Literal["mcp"] = "mcp"
"""For MCP tools, the type is always `mcp`."""
name: str | None = None
@@ -91,5 +91,5 @@ class ToolChoiceCustom(BaseModel):
name: str
"""The name of the custom tool to call."""
- type: Literal["custom"]
+ type: Literal["custom"] = "custom"
"""For custom tool calling, the type is always `custom`."""
diff --git a/llama_stack/strong_typing/schema.py b/llama_stack/strong_typing/schema.py
index a8fc86a72..5c9826983 100644
--- a/llama_stack/strong_typing/schema.py
+++ b/llama_stack/strong_typing/schema.py
@@ -93,14 +93,7 @@ def get_class_property_docstrings(
"""
result = {}
- # Check if the type has __mro__ (method resolution order)
- if hasattr(data_type, "__mro__"):
- bases = inspect.getmro(data_type)
- else:
- # For TypeAliasType or other types without __mro__, just use the type itself
- bases = [data_type] if hasattr(data_type, "__doc__") else []
-
- for base in bases:
+ for base in inspect.getmro(data_type):
docstr = docstring.parse_type(base)
for param in docstr.params.values():
if param.name in result:
@@ -512,24 +505,13 @@ class JsonSchemaGenerator:
(concrete_type,) = typing.get_args(typ)
return self.type_to_schema(concrete_type)
- # Check if this is a TypeAliasType (Python 3.12+) which doesn't have __mro__
- if hasattr(typ, "__mro__"):
- # dictionary of class attributes
- members = dict(inspect.getmembers(typ, lambda a: not inspect.isroutine(a)))
- property_docstrings = get_class_property_docstrings(typ, self.options.property_description_fun)
- else:
- # TypeAliasType or other types without __mro__
- members = {}
- property_docstrings = {}
+ # dictionary of class attributes
+ members = dict(inspect.getmembers(typ, lambda a: not inspect.isroutine(a)))
+
+ property_docstrings = get_class_property_docstrings(typ, self.options.property_description_fun)
properties: Dict[str, Schema] = {}
required: List[str] = []
- # Only process properties if the type supports class properties
- if hasattr(typ, "__mro__"):
- class_properties = get_class_properties(typ)
- else:
- class_properties = []
-
- for property_name, property_type in class_properties:
+ for property_name, property_type in get_class_properties(typ):
# rename property if an alias name is specified
alias = get_annotation(property_type, Alias)
if alias:
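
These guards existed only because `OpenAIResponsesToolChoice` used to be a PEP 695 `type` alias: `TypeAliasType` instances are not classes, so they have no `__mro__` and `inspect.getmro` fails on them. With the alias gone, every type reaching this code path is an ordinary class again. A minimal sketch of the distinction (Python 3.12+, not part of the PR):

```python
import inspect

# A PEP 695 alias is a TypeAliasType instance, not a class: no MRO to walk.
type ExampleAlias = int | str
print(hasattr(ExampleAlias, "__mro__"))  # False, which is what forced the old guard

# Ordinary classes (including pydantic models) expose an MRO as usual.
print(inspect.getmro(int))  # (<class 'int'>, <class 'object'>)
```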