Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-12-03 09:53:45 +00:00)
refactor: move Authorization validation to correct handler file
Per reviewer feedback, validation should be in the openai_responses.py handler, not in streaming.py. The validation logic was moved to the create_openai_response() method, which is the main entry point for response creation.

- Added validation in create_openai_response() before processing
- Removed the duplicate validation from _process_mcp_tool() in streaming.py
- Validation runs early and rejects malformed requests immediately
- Maintains the same security check: rejects Authorization in the headers dict
Parent: 50040f3df7
Commit: 2b0423c337
2 changed files with 13 additions and 9 deletions
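As the commit message notes, the check rejects an Authorization entry in an MCP tool's headers dict and points callers at the 'authorization' parameter instead. A minimal sketch of the two tool shapes, written as plain dicts: the server_label and server_url fields are illustrative placeholders, and the exact token format expected by 'authorization' is not specified in this diff.

# Hedged sketch only: the shape this commit starts rejecting vs. the recommended one.
rejected_mcp_tool = {
    "type": "mcp",
    "server_label": "example-server",           # placeholder field, for illustration
    "server_url": "https://example.com/mcp",    # placeholder field, for illustration
    "headers": {"Authorization": "Bearer <token>"},  # now raises ValueError early
}

accepted_mcp_tool = {
    "type": "mcp",
    "server_label": "example-server",
    "server_url": "https://example.com/mcp",
    "authorization": "<token>",  # recommended: dedicated parameter instead of headers
}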
openai_responses.py
@@ -259,6 +259,19 @@ class OpenAIResponsesImpl:
         stream = bool(stream)
         text = OpenAIResponseText(format=OpenAIResponseTextFormat(type="text")) if text is None else text
 
+        # Validate MCP tools: ensure Authorization header is not passed via headers dict
+        if tools:
+            from llama_stack.apis.agents.openai_responses import OpenAIResponseInputToolMCP
+
+            for tool in tools:
+                if isinstance(tool, OpenAIResponseInputToolMCP) and tool.headers:
+                    for key in tool.headers.keys():
+                        if key.lower() == "authorization":
+                            raise ValueError(
+                                "Authorization header cannot be passed via 'headers'. "
+                                "Please use the 'authorization' parameter instead."
+                            )
+
         guardrail_ids = extract_guardrail_ids(guardrails) if guardrails else []
 
         if conversation is not None:
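The added check is a pure function of the headers dict, so its behavior is easy to exercise in isolation. Below is a standalone sketch, not code from this commit, that reimplements the same case-insensitive check so it can be run without constructing OpenAIResponsesImpl. Since create_openai_response() is described as the main entry point, a single early check there rejects the request before any streaming work begins.

# Standalone illustration of the validation added above; not the repository's code.
def reject_authorization_header(headers: dict[str, str] | None) -> None:
    """Raise if an Authorization header (any casing) is passed via the headers dict."""
    for key in (headers or {}):
        if key.lower() == "authorization":
            raise ValueError(
                "Authorization header cannot be passed via 'headers'. "
                "Please use the 'authorization' parameter instead."
            )

# Mixed-case spellings are rejected; unrelated or missing headers pass through.
for hdrs in ({"Authorization": "Bearer x"}, {"AUTHORIZATION": "x"}, {"authorization": "x"}):
    try:
        reject_authorization_header(hdrs)
        raise AssertionError("expected ValueError")
    except ValueError:
        pass
reject_authorization_header({"X-Custom-Header": "ok"})  # no exception
reject_authorization_header(None)                        # no exception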
streaming.py
@@ -1055,15 +1055,6 @@ class StreamingResponseOrchestrator:
         """Process an MCP tool configuration and emit appropriate streaming events."""
         from llama_stack.providers.utils.tools.mcp import list_mcp_tools
 
-        # Validate that Authorization header is not passed via headers dict
-        if mcp_tool.headers:
-            for key in mcp_tool.headers.keys():
-                if key.lower() == "authorization":
-                    raise ValueError(
-                        "Authorization header cannot be passed via 'headers'. "
-                        "Please use the 'authorization' parameter instead."
-                    )
-
         # Emit mcp_list_tools.in_progress
         self.sequence_number += 1
         yield OpenAIResponseObjectStreamResponseMcpListToolsInProgress(