feat: Add metadata field to request and response

This change adds an optional metadata field to the OpenAI-compatible request and
response objects.

fixes: #3564
Signed-off-by: Abhishek Bongale <abhishekbongale@outlook.com>
This commit is contained in:
Abhishek Bongale 2025-11-26 11:53:57 +00:00
parent b1c5b8fa9f
commit cfbc1f1624
10 changed files with 98 additions and 0 deletions

View file

@ -6796,6 +6796,12 @@ components:
anyOf: anyOf:
- type: integer - type: integer
- type: 'null' - type: 'null'
metadata:
anyOf:
- additionalProperties:
type: string
type: object
- type: 'null'
input: input:
items: items:
anyOf: anyOf:
@ -7199,6 +7205,12 @@ components:
anyOf: anyOf:
- type: integer - type: integer
- type: 'null' - type: 'null'
metadata:
anyOf:
- additionalProperties:
type: string
type: object
- type: 'null'
type: object type: object
required: required:
- input - input
@ -7330,6 +7342,12 @@ components:
anyOf: anyOf:
- type: integer - type: integer
- type: 'null' - type: 'null'
metadata:
anyOf:
- additionalProperties:
type: string
type: object
- type: 'null'
type: object type: object
required: required:
- created_at - created_at

View file

@ -3639,6 +3639,12 @@ components:
anyOf: anyOf:
- type: integer - type: integer
- type: 'null' - type: 'null'
metadata:
anyOf:
- additionalProperties:
type: string
type: object
- type: 'null'
input: input:
items: items:
anyOf: anyOf:
@ -4042,6 +4048,12 @@ components:
anyOf: anyOf:
- type: integer - type: integer
- type: 'null' - type: 'null'
metadata:
anyOf:
- additionalProperties:
type: string
type: object
- type: 'null'
type: object type: object
required: required:
- input - input
@ -4173,6 +4185,12 @@ components:
anyOf: anyOf:
- type: integer - type: integer
- type: 'null' - type: 'null'
metadata:
anyOf:
- additionalProperties:
type: string
type: object
- type: 'null'
type: object type: object
required: required:
- created_at - created_at

View file

@ -3336,6 +3336,12 @@ components:
anyOf: anyOf:
- type: integer - type: integer
- type: 'null' - type: 'null'
metadata:
anyOf:
- additionalProperties:
type: string
type: object
- type: 'null'
input: input:
items: items:
anyOf: anyOf:
@ -3736,6 +3742,12 @@ components:
anyOf: anyOf:
- type: integer - type: integer
- type: 'null' - type: 'null'
metadata:
anyOf:
- additionalProperties:
type: string
type: object
- type: 'null'
type: object type: object
required: required:
- created_at - created_at

View file

@ -5817,6 +5817,12 @@ components:
anyOf: anyOf:
- type: integer - type: integer
- type: 'null' - type: 'null'
metadata:
anyOf:
- additionalProperties:
type: string
type: object
- type: 'null'
input: input:
items: items:
anyOf: anyOf:
@ -6220,6 +6226,12 @@ components:
anyOf: anyOf:
- type: integer - type: integer
- type: 'null' - type: 'null'
metadata:
anyOf:
- additionalProperties:
type: string
type: object
- type: 'null'
type: object type: object
required: required:
- input - input
@ -6351,6 +6363,12 @@ components:
anyOf: anyOf:
- type: integer - type: integer
- type: 'null' - type: 'null'
metadata:
anyOf:
- additionalProperties:
type: string
type: object
- type: 'null'
type: object type: object
required: required:
- created_at - created_at

View file

@ -6796,6 +6796,12 @@ components:
anyOf: anyOf:
- type: integer - type: integer
- type: 'null' - type: 'null'
metadata:
anyOf:
- additionalProperties:
type: string
type: object
- type: 'null'
input: input:
items: items:
anyOf: anyOf:
@ -7199,6 +7205,12 @@ components:
anyOf: anyOf:
- type: integer - type: integer
- type: 'null' - type: 'null'
metadata:
anyOf:
- additionalProperties:
type: string
type: object
- type: 'null'
type: object type: object
required: required:
- input - input
@ -7330,6 +7342,12 @@ components:
anyOf: anyOf:
- type: integer - type: integer
- type: 'null' - type: 'null'
metadata:
anyOf:
- additionalProperties:
type: string
type: object
- type: 'null'
type: object type: object
required: required:
- created_at - created_at

View file

@ -111,6 +111,7 @@ class MetaReferenceAgentsImpl(Agents):
max_infer_iters: int | None = 10, max_infer_iters: int | None = 10,
guardrails: list[ResponseGuardrail] | None = None, guardrails: list[ResponseGuardrail] | None = None,
max_tool_calls: int | None = None, max_tool_calls: int | None = None,
metadata: dict[str, str] | None = None,
) -> OpenAIResponseObject: ) -> OpenAIResponseObject:
assert self.openai_responses_impl is not None, "OpenAI responses not initialized" assert self.openai_responses_impl is not None, "OpenAI responses not initialized"
result = await self.openai_responses_impl.create_openai_response( result = await self.openai_responses_impl.create_openai_response(
@ -130,6 +131,7 @@ class MetaReferenceAgentsImpl(Agents):
guardrails, guardrails,
parallel_tool_calls, parallel_tool_calls,
max_tool_calls, max_tool_calls,
metadata,
) )
return result # type: ignore[no-any-return] return result # type: ignore[no-any-return]

View file

@ -336,6 +336,7 @@ class OpenAIResponsesImpl:
guardrails: list[str | ResponseGuardrailSpec] | None = None, guardrails: list[str | ResponseGuardrailSpec] | None = None,
parallel_tool_calls: bool | None = None, parallel_tool_calls: bool | None = None,
max_tool_calls: int | None = None, max_tool_calls: int | None = None,
metadata: dict[str, str] | None = None,
): ):
stream = bool(stream) stream = bool(stream)
text = OpenAIResponseText(format=OpenAIResponseTextFormat(type="text")) if text is None else text text = OpenAIResponseText(format=OpenAIResponseTextFormat(type="text")) if text is None else text
@ -390,6 +391,7 @@ class OpenAIResponsesImpl:
guardrail_ids=guardrail_ids, guardrail_ids=guardrail_ids,
parallel_tool_calls=parallel_tool_calls, parallel_tool_calls=parallel_tool_calls,
max_tool_calls=max_tool_calls, max_tool_calls=max_tool_calls,
metadata=metadata,
) )
if stream: if stream:
@ -442,6 +444,7 @@ class OpenAIResponsesImpl:
guardrail_ids: list[str] | None = None, guardrail_ids: list[str] | None = None,
parallel_tool_calls: bool | None = True, parallel_tool_calls: bool | None = True,
max_tool_calls: int | None = None, max_tool_calls: int | None = None,
metadata: dict[str, str] | None = None,
) -> AsyncIterator[OpenAIResponseObjectStream]: ) -> AsyncIterator[OpenAIResponseObjectStream]:
# These should never be None when called from create_openai_response (which sets defaults) # These should never be None when called from create_openai_response (which sets defaults)
# but we assert here to help mypy understand the types # but we assert here to help mypy understand the types
@ -490,6 +493,7 @@ class OpenAIResponsesImpl:
guardrail_ids=guardrail_ids, guardrail_ids=guardrail_ids,
instructions=instructions, instructions=instructions,
max_tool_calls=max_tool_calls, max_tool_calls=max_tool_calls,
metadata=metadata,
) )
# Stream the response # Stream the response

View file

@ -118,6 +118,7 @@ class StreamingResponseOrchestrator:
prompt: OpenAIResponsePrompt | None = None, prompt: OpenAIResponsePrompt | None = None,
parallel_tool_calls: bool | None = None, parallel_tool_calls: bool | None = None,
max_tool_calls: int | None = None, max_tool_calls: int | None = None,
metadata: dict[str, str] | None = None,
): ):
self.inference_api = inference_api self.inference_api = inference_api
self.ctx = ctx self.ctx = ctx
@ -135,6 +136,7 @@ class StreamingResponseOrchestrator:
self.parallel_tool_calls = parallel_tool_calls self.parallel_tool_calls = parallel_tool_calls
# Max number of total calls to built-in tools that can be processed in a response # Max number of total calls to built-in tools that can be processed in a response
self.max_tool_calls = max_tool_calls self.max_tool_calls = max_tool_calls
self.metadata = metadata
self.sequence_number = 0 self.sequence_number = 0
# Store MCP tool mapping that gets built during tool processing # Store MCP tool mapping that gets built during tool processing
self.mcp_tool_to_server: dict[str, OpenAIResponseInputToolMCP] = ( self.mcp_tool_to_server: dict[str, OpenAIResponseInputToolMCP] = (
@ -162,6 +164,7 @@ class StreamingResponseOrchestrator:
model=self.ctx.model, model=self.ctx.model,
status="completed", status="completed",
output=[OpenAIResponseMessage(role="assistant", content=[refusal_content], type="message")], output=[OpenAIResponseMessage(role="assistant", content=[refusal_content], type="message")],
metadata=self.metadata,
) )
return OpenAIResponseObjectStreamResponseCompleted(response=refusal_response) return OpenAIResponseObjectStreamResponseCompleted(response=refusal_response)
@ -197,6 +200,7 @@ class StreamingResponseOrchestrator:
prompt=self.prompt, prompt=self.prompt,
parallel_tool_calls=self.parallel_tool_calls, parallel_tool_calls=self.parallel_tool_calls,
max_tool_calls=self.max_tool_calls, max_tool_calls=self.max_tool_calls,
metadata=self.metadata,
) )
async def create_response(self) -> AsyncIterator[OpenAIResponseObjectStream]: async def create_response(self) -> AsyncIterator[OpenAIResponseObjectStream]:

View file

@ -89,6 +89,7 @@ class Agents(Protocol):
), ),
] = None, ] = None,
max_tool_calls: int | None = None, max_tool_calls: int | None = None,
metadata: dict[str, str] | None = None,
) -> OpenAIResponseObject | AsyncIterator[OpenAIResponseObjectStream]: ) -> OpenAIResponseObject | AsyncIterator[OpenAIResponseObjectStream]:
"""Create a model response. """Create a model response.
@ -100,6 +101,7 @@ class Agents(Protocol):
:param include: (Optional) Additional fields to include in the response. :param include: (Optional) Additional fields to include in the response.
:param guardrails: (Optional) List of guardrails to apply during response generation. Can be guardrail IDs (strings) or guardrail specifications. :param guardrails: (Optional) List of guardrails to apply during response generation. Can be guardrail IDs (strings) or guardrail specifications.
:param max_tool_calls: (Optional) Max number of total calls to built-in tools that can be processed in a response. :param max_tool_calls: (Optional) Max number of total calls to built-in tools that can be processed in a response.
:param metadata: (Optional) Dictionary of metadata key-value pairs to attach to the response.
:returns: An OpenAIResponseObject. :returns: An OpenAIResponseObject.
""" """
... ...

View file

@ -597,6 +597,7 @@ class OpenAIResponseObject(BaseModel):
:param usage: (Optional) Token usage information for the response :param usage: (Optional) Token usage information for the response
:param instructions: (Optional) System message inserted into the model's context :param instructions: (Optional) System message inserted into the model's context
:param max_tool_calls: (Optional) Max number of total calls to built-in tools that can be processed in a response :param max_tool_calls: (Optional) Max number of total calls to built-in tools that can be processed in a response
:param metadata: (Optional) Dictionary of metadata key-value pairs
""" """
created_at: int created_at: int
@ -619,6 +620,7 @@ class OpenAIResponseObject(BaseModel):
usage: OpenAIResponseUsage | None = None usage: OpenAIResponseUsage | None = None
instructions: str | None = None instructions: str | None = None
max_tool_calls: int | None = None max_tool_calls: int | None = None
metadata: dict[str, str] | None = None
@json_schema_type @json_schema_type