Mirror of https://github.com/meta-llama/llama-stack.git, synced 2025-08-01 16:24:44 +00:00
feat: add function tools to openai responses
parent b90bb66f28, commit 1990df2c50
5 changed files with 220 additions and 3 deletions
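For illustration only (not part of this commit): with the schema and model additions below, a caller can pass "function" tools in a Responses request. A minimal sketch, assuming an OpenAI-compatible Python client pointed at a running Llama Stack server; the base URL, API key, model id, prompt, and tool definition are placeholders, not values taken from the diff.

# Hypothetical usage sketch: pass a function tool to the responses endpoint.
# The tool fields (type, name, description, parameters, strict) follow the
# OpenAIResponseInputToolFunction schema introduced in this commit.
from openai import OpenAI

client = OpenAI(base_url="http://localhost:8321/v1/openai/v1", api_key="none")  # placeholder URL and key

response = client.responses.create(
    model="meta-llama/Llama-3.2-3B-Instruct",  # placeholder model id
    input="What is the weather in Tokyo?",
    tools=[
        {
            "type": "function",
            "name": "get_weather",
            "description": "Look up the current weather for a city.",
            "parameters": {
                "type": "object",
                "properties": {"city": {"type": "string"}},
                "required": ["city"],
            },
        }
    ],
)
print(response.output)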
107  docs/_static/llama-stack-spec.html (vendored)
@@ -6549,6 +6549,113 @@
       "title": "OpenAIResponseInputMessageContentText"
     },
+    "OpenAIResponseInputTool": {
+      "oneOf": [
+        {
+          "$ref": "#/components/schemas/OpenAIResponseInputToolWebSearch"
+        },
+        {
+          "$ref": "#/components/schemas/OpenAIResponseInputToolFileSearch"
+        },
+        {
+          "$ref": "#/components/schemas/OpenAIResponseInputToolFunction"
+        }
+      ],
+      "discriminator": {
+        "propertyName": "type",
+        "mapping": {
+          "web_search": "#/components/schemas/OpenAIResponseInputToolWebSearch",
+          "file_search": "#/components/schemas/OpenAIResponseInputToolFileSearch",
+          "function": "#/components/schemas/OpenAIResponseInputToolFunction"
+        }
+      }
+    },
+    "OpenAIResponseInputToolFileSearch": {
+      "type": "object",
+      "properties": {
+        "type": {
+          "type": "string",
+          "const": "file_search",
+          "default": "file_search"
+        },
+        "vector_store_id": {
+          "type": "array",
+          "items": {
+            "type": "string"
+          }
+        },
+        "ranking_options": {
+          "type": "object",
+          "properties": {
+            "ranker": {
+              "type": "string"
+            },
+            "score_threshold": {
+              "type": "number",
+              "default": 0.0
+            }
+          },
+          "additionalProperties": false,
+          "title": "FileSearchRankingOptions"
+        }
+      },
+      "additionalProperties": false,
+      "required": [
+        "type",
+        "vector_store_id"
+      ],
+      "title": "OpenAIResponseInputToolFileSearch"
+    },
+    "OpenAIResponseInputToolFunction": {
+      "type": "object",
+      "properties": {
+        "type": {
+          "type": "string",
+          "const": "function",
+          "default": "function"
+        },
+        "name": {
+          "type": "string"
+        },
+        "description": {
+          "type": "string"
+        },
+        "parameters": {
+          "type": "object",
+          "additionalProperties": {
+            "oneOf": [
+              {
+                "type": "null"
+              },
+              {
+                "type": "boolean"
+              },
+              {
+                "type": "number"
+              },
+              {
+                "type": "string"
+              },
+              {
+                "type": "array"
+              },
+              {
+                "type": "object"
+              }
+            ]
+          }
+        },
+        "strict": {
+          "type": "boolean"
+        }
+      },
+      "additionalProperties": false,
+      "required": [
+        "type",
+        "name"
+      ],
+      "title": "OpenAIResponseInputToolFunction"
+    },
     "OpenAIResponseInputToolWebSearch": {
       "type": "object",
       "properties": {
         "type": {
65  docs/_static/llama-stack-spec.yaml (vendored)
@@ -4585,6 +4585,71 @@ components:
         - type
       title: OpenAIResponseInputMessageContentText
+    OpenAIResponseInputTool:
+      oneOf:
+        - $ref: '#/components/schemas/OpenAIResponseInputToolWebSearch'
+        - $ref: '#/components/schemas/OpenAIResponseInputToolFileSearch'
+        - $ref: '#/components/schemas/OpenAIResponseInputToolFunction'
+      discriminator:
+        propertyName: type
+        mapping:
+          web_search: '#/components/schemas/OpenAIResponseInputToolWebSearch'
+          file_search: '#/components/schemas/OpenAIResponseInputToolFileSearch'
+          function: '#/components/schemas/OpenAIResponseInputToolFunction'
+    OpenAIResponseInputToolFileSearch:
+      type: object
+      properties:
+        type:
+          type: string
+          const: file_search
+          default: file_search
+        vector_store_id:
+          type: array
+          items:
+            type: string
+        ranking_options:
+          type: object
+          properties:
+            ranker:
+              type: string
+            score_threshold:
+              type: number
+              default: 0.0
+          additionalProperties: false
+          title: FileSearchRankingOptions
+      additionalProperties: false
+      required:
+        - type
+        - vector_store_id
+      title: OpenAIResponseInputToolFileSearch
+    OpenAIResponseInputToolFunction:
+      type: object
+      properties:
+        type:
+          type: string
+          const: function
+          default: function
+        name:
+          type: string
+        description:
+          type: string
+        parameters:
+          type: object
+          additionalProperties:
+            oneOf:
+              - type: 'null'
+              - type: boolean
+              - type: number
+              - type: string
+              - type: array
+              - type: object
+        strict:
+          type: boolean
+      additionalProperties: false
+      required:
+        - type
+        - name
+      title: OpenAIResponseInputToolFunction
     OpenAIResponseInputToolWebSearch:
       type: object
       properties:
         type:
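For reference, a tools array that conforms to the schemas above could look like the following sketch (written as a Python literal; the vector store id, ranker, threshold, and function definition are illustrative placeholders):

# Illustrative tool entries matching the OpenAIResponseInputTool schemas above.
# The "type" field is the discriminator used to select the concrete schema.
tools = [
    {
        "type": "file_search",
        "vector_store_id": ["vs_123"],  # placeholder vector store id
        "ranking_options": {"ranker": "default", "score_threshold": 0.2},
    },
    {
        "type": "function",
        "name": "get_weather",
        "description": "Look up the current weather for a city.",
        "parameters": {"type": "object", "properties": {"city": {"type": "string"}}},
    },
]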
@@ -4,7 +4,7 @@
 # This source code is licensed under the terms described in the LICENSE file in
 # the root directory of this source tree.
 
-from typing import Annotated, Literal
+from typing import Annotated, Any, Literal
 
 from pydantic import BaseModel, Field
 
@@ -139,8 +139,30 @@ class OpenAIResponseInputToolWebSearch(BaseModel):
     # TODO: add user_location
 
 
+@json_schema_type
+class OpenAIResponseInputToolFunction(BaseModel):
+    type: Literal["function"] = "function"
+    name: str
+    description: str | None = None
+    parameters: dict[str, Any] | None
+    strict: bool | None = None
+
+
+class FileSearchRankingOptions(BaseModel):
+    ranker: str | None = None
+    score_threshold: float | None = Field(default=0.0, ge=0.0, le=1.0)
+
+
+@json_schema_type
+class OpenAIResponseInputToolFileSearch(BaseModel):
+    type: Literal["file_search"] = "file_search"
+    vector_store_id: list[str]
+    ranking_options: FileSearchRankingOptions | None = None
+    # TODO: add filters
+
+
 OpenAIResponseInputTool = Annotated[
-    OpenAIResponseInputToolWebSearch,
+    OpenAIResponseInputToolWebSearch | OpenAIResponseInputToolFileSearch | OpenAIResponseInputToolFunction,
     Field(discriminator="type"),
 ]
 register_schema(OpenAIResponseInputTool, name="OpenAIResponseInputTool")
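A small sketch (not part of the diff) of how the discriminated union above resolves raw tool dicts into the concrete models, assuming the classes above are in scope and pydantic v2 is used; the ids and names are placeholders:

# Sketch: pydantic dispatches on the "type" discriminator declared in the Annotated Field.
from pydantic import TypeAdapter

adapter = TypeAdapter(OpenAIResponseInputTool)

function_tool = adapter.validate_python(
    {"type": "function", "name": "get_weather", "parameters": {"city": "Tokyo"}}
)
assert isinstance(function_tool, OpenAIResponseInputToolFunction)

file_search_tool = adapter.validate_python(
    {"type": "file_search", "vector_store_id": ["vs_123"]}  # placeholder id
)
assert isinstance(file_search_tool, OpenAIResponseInputToolFileSearch)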
@@ -17,6 +17,7 @@ from importlib.metadata import version as parse_version
 from pathlib import Path
 from typing import Annotated, Any
 
+import rich.pretty
 import yaml
 from fastapi import Body, FastAPI, HTTPException, Request
 from fastapi import Path as FastapiPath
@@ -186,11 +187,30 @@ async def sse_generator(event_gen_coroutine):
         )
 
 
+async def log_request_pre_validation(request: Request):
+    if request.method in ("POST", "PUT", "PATCH"):
+        try:
+            body_bytes = await request.body()
+            if body_bytes:
+                try:
+                    parsed_body = json.loads(body_bytes.decode())
+                    log_output = rich.pretty.pretty_repr(parsed_body)
+                except (json.JSONDecodeError, UnicodeDecodeError):
+                    log_output = repr(body_bytes)
+                logger.debug(f"Incoming raw request body for {request.method} {request.url.path}:\n{log_output}")
+            else:
+                logger.debug(f"Incoming {request.method} {request.url.path} request with empty body.")
+        except Exception as e:
+            logger.warning(f"Could not read or log request body for {request.method} {request.url.path}: {e}")
+
+
 def create_dynamic_typed_route(func: Any, method: str, route: str):
     async def endpoint(request: Request, **kwargs):
         # Get auth attributes from the request scope
         user_attributes = request.scope.get("user_attributes", {})
 
+        await log_request_pre_validation(request)
+
         # Use context manager with both provider data and auth attributes
         with request_provider_data_context(request.headers, user_attributes):
             is_streaming = is_streaming_request(func.__name__, request, **kwargs)
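The new log_request_pre_validation helper formats parsed request bodies with rich.pretty.pretty_repr; a standalone sketch of that call with an illustrative body (values are placeholders):

# Sketch of the formatting used above: pretty_repr returns a readable string for any object.
import json

import rich.pretty

body_bytes = b'{"model": "example-model", "input": "hello", "tools": [{"type": "function", "name": "get_weather"}]}'
parsed_body = json.loads(body_bytes.decode())
print(rich.pretty.pretty_repr(parsed_body))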
@@ -235,6 +235,7 @@ class OpenAIResponsesImpl:
             status="completed",
             output=output_messages,
         )
+        logger.debug(f"OpenAI Responses response: {response}")
 
         if store:
             # Store in kvstore
@@ -290,7 +291,9 @@ class OpenAIResponsesImpl:
         chat_tools: list[ChatCompletionToolParam] = []
         for input_tool in tools:
             # TODO: Handle other tool types
-            if input_tool.type == "web_search":
+            if input_tool.type == "function":
+                chat_tools.append(ChatCompletionToolParam(type="function", function=input_tool.model_dump()))
+            elif input_tool.type == "web_search":
                 tool_name = "web_search"
                 tool = await self.tool_groups_api.get_tool(tool_name)
                 tool_def = ToolDefinition(
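For reference, the new "function" branch forwards the tool definition to chat completions via model_dump(); a rough sketch (illustrative values, not taken from the diff) of what that serialization contains:

# Sketch: model_dump() on the new model yields the fields passed as the chat tool's
# "function" payload: type, name, description, parameters, strict.
tool = OpenAIResponseInputToolFunction(
    name="get_weather",
    description="Look up the current weather for a city.",
    parameters={"type": "object", "properties": {"city": {"type": "string"}}},
)
print(tool.model_dump())
# -> {'type': 'function', 'name': 'get_weather', 'description': '...', 'parameters': {...}, 'strict': None}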