feat(responses): add include parameter

This commit is contained in:
Ashwin Bharambe 2025-08-12 10:17:00 -07:00
parent 6812aa1e1e
commit e66fc547e3
5 changed files with 28 additions and 1 deletion

View file

@@ -8515,6 +8515,13 @@
"$ref": "#/components/schemas/OpenAIResponseInputTool" "$ref": "#/components/schemas/OpenAIResponseInputTool"
} }
}, },
"include": {
"type": "array",
"items": {
"type": "string"
},
"description": "(Optional) Additional fields to include in the response."
},
"max_infer_iters": { "max_infer_iters": {
"type": "integer" "type": "integer"
} }

View file

@@ -6188,6 +6188,12 @@ components:
type: array type: array
items: items:
$ref: '#/components/schemas/OpenAIResponseInputTool' $ref: '#/components/schemas/OpenAIResponseInputTool'
include:
type: array
items:
type: string
description: >-
(Optional) Additional fields to include in the response.
max_infer_iters: max_infer_iters:
type: integer type: integer
additionalProperties: false additionalProperties: false

View file

@@ -706,6 +706,7 @@ class Agents(Protocol):
temperature: float | None = None, temperature: float | None = None,
text: OpenAIResponseText | None = None, text: OpenAIResponseText | None = None,
tools: list[OpenAIResponseInputTool] | None = None, tools: list[OpenAIResponseInputTool] | None = None,
include: list[str] | None = None,
max_infer_iters: int | None = 10, # this is an extension to the OpenAI API max_infer_iters: int | None = 10, # this is an extension to the OpenAI API
) -> OpenAIResponseObject | AsyncIterator[OpenAIResponseObjectStream]: ) -> OpenAIResponseObject | AsyncIterator[OpenAIResponseObjectStream]:
"""Create a new OpenAI response. """Create a new OpenAI response.
@@ -713,6 +714,7 @@ class Agents(Protocol):
:param input: Input message(s) to create the response. :param input: Input message(s) to create the response.
:param model: The underlying LLM used for completions. :param model: The underlying LLM used for completions.
:param previous_response_id: (Optional) if specified, the new response will be a continuation of the previous response. This can be used to easily fork-off new responses from existing responses. :param previous_response_id: (Optional) if specified, the new response will be a continuation of the previous response. This can be used to easily fork-off new responses from existing responses.
:param include: (Optional) Additional fields to include in the response.
:returns: An OpenAIResponseObject. :returns: An OpenAIResponseObject.
""" """
... ...

View file

@@ -327,10 +327,21 @@ class MetaReferenceAgentsImpl(Agents):
temperature: float | None = None, temperature: float | None = None,
text: OpenAIResponseText | None = None, text: OpenAIResponseText | None = None,
tools: list[OpenAIResponseInputTool] | None = None, tools: list[OpenAIResponseInputTool] | None = None,
include: list[str] | None = None,
max_infer_iters: int | None = 10, max_infer_iters: int | None = 10,
) -> OpenAIResponseObject: ) -> OpenAIResponseObject:
return await self.openai_responses_impl.create_openai_response( return await self.openai_responses_impl.create_openai_response(
input, model, instructions, previous_response_id, store, stream, temperature, text, tools, max_infer_iters input,
model,
instructions,
previous_response_id,
store,
stream,
temperature,
text,
tools,
include,
max_infer_iters,
) )
async def list_openai_responses( async def list_openai_responses(

View file

@@ -333,6 +333,7 @@ class OpenAIResponsesImpl:
temperature: float | None = None, temperature: float | None = None,
text: OpenAIResponseText | None = None, text: OpenAIResponseText | None = None,
tools: list[OpenAIResponseInputTool] | None = None, tools: list[OpenAIResponseInputTool] | None = None,
include: list[str] | None = None,
max_infer_iters: int | None = 10, max_infer_iters: int | None = 10,
): ):
stream = bool(stream) stream = bool(stream)