feat: File search tool for Responses API (#2426)

# What does this PR do?

This is an initial working prototype of wiring up the `file_search`
builtin tool for the Responses API to our existing RAG knowledge search
tool.

This builds on top of the bits we already have merged, and it may not be
the ideal way to implement this. In particular, the way I shuffle the
vector store IDs from the original Responses API tool request to the
actual tool execution feels a bit hacky (grep for
`tool_kwargs["vector_db_ids"]` in `_execute_tool_call` to see what I
mean).
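
For context, here is a minimal sketch of the client-side request this wires up, assuming an OpenAI-compatible client pointed at a local Llama Stack server on port 8321; the vector store ID, question, and API key below are placeholders:

```python
from openai import OpenAI

# Assumes a Llama Stack server exposing its OpenAI-compatible API locally;
# the api_key value is a dummy since the local server doesn't check it.
client = OpenAI(base_url="http://localhost:8321/v1/openai/v1", api_key="none")

response = client.responses.create(
    model="meta-llama/Llama-3.3-70B-Instruct",
    input="What topics are covered in the attached documents?",
    tools=[
        {
            "type": "file_search",
            # One or more previously created and populated vector stores
            # to search; "vs_1234" is a placeholder ID.
            "vector_store_ids": ["vs_1234"],
            "max_num_results": 5,
        }
    ],
)
print(response.output_text)
```

When the model chooses to use the tool, the response output includes a `file_search_call` item (the new `OpenAIResponseOutputMessageFileSearchToolCall` schema below) alongside the final message.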

## Test Plan

I stubbed in some new tests to exercise this using text and PDF
documents.

Note that this currently lives under tests/verifications only, because
tool calling with the small Llama-3.2-3B model we run in CI (and that I
use as an example below) sometimes flakes. We'd want to make the test
more robust in some way before moving it over to tests/integration and
running it in CI.
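
Roughly, the setup those tests do looks like the sketch below (file names and contents here are illustrative, not the exact test code, and it assumes a recent `openai` Python client where the vector stores API is out of beta): create a vector store, upload a document via the OpenAI-compatible files API, and attach it through the new vector store files route before issuing the `file_search` request.

```python
from openai import OpenAI

client = OpenAI(base_url="http://localhost:8321/v1/openai/v1", api_key="none")

# Create an empty vector store to hold the test documents.
vector_store = client.vector_stores.create(name="file_search_test_store")

# Upload a document; "facts.txt" is an illustrative file name.
with open("facts.txt", "rb") as f:
    uploaded = client.files.create(file=f, purpose="assistants")

# Attach the uploaded file to the vector store. This exercises the new
# POST /v1/openai/v1/vector_stores/{vector_store_id}/files route; the
# static chunking strategy shown mirrors the spec defaults.
client.vector_stores.files.create(
    vector_store_id=vector_store.id,
    file_id=uploaded.id,
    chunking_strategy={
        "type": "static",
        "static": {"max_chunk_size_tokens": 800, "chunk_overlap_tokens": 400},
    },
)
```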

### OpenAI SaaS (to verify test correctness)

```
pytest -sv tests/verifications/openai_api/test_responses.py \
  -k 'file_search' \
  --base-url=https://api.openai.com/v1 \
  --model=gpt-4o
```

### Fireworks with faiss vector store

```
llama stack run llama_stack/templates/fireworks/run.yaml

pytest -sv tests/verifications/openai_api/test_responses.py \
  -k 'file_search' \
  --base-url=http://localhost:8321/v1/openai/v1 \
  --model=meta-llama/Llama-3.3-70B-Instruct
```

### Ollama with faiss vector store

This sometimes flakes on Ollama because the small quantized model
doesn't always choose to call the tool to answer the user's question,
but it often works.

```
ollama run llama3.2:3b

INFERENCE_MODEL="meta-llama/Llama-3.2-3B-Instruct" \
llama stack run ./llama_stack/templates/ollama/run.yaml \
  --image-type venv \
  --env OLLAMA_URL="http://0.0.0.0:11434"

pytest -sv tests/verifications/openai_api/test_responses.py \
  -k 'file_search' \
  --base-url=http://localhost:8321/v1/openai/v1 \
  --model=meta-llama/Llama-3.2-3B-Instruct
```

### OpenAI provider with sqlite-vec vector store

```
llama stack run ./llama_stack/templates/starter/run.yaml --image-type venv

pytest -sv tests/verifications/openai_api/test_responses.py \
  -k 'file_search' \
  --base-url=http://localhost:8321/v1/openai/v1 \
  --model=openai/gpt-4o-mini
```

### Ensure existing vector store integration tests still pass

```
ollama run llama3.2:3b

INFERENCE_MODEL="meta-llama/Llama-3.2-3B-Instruct" \
llama stack run ./llama_stack/templates/ollama/run.yaml \
  --image-type venv \
  --env OLLAMA_URL="http://0.0.0.0:11434"

LLAMA_STACK_CONFIG=http://localhost:8321 \
pytest -sv tests/integration/vector_io \
  --text-model "meta-llama/Llama-3.2-3B-Instruct" \
  --embedding-model=all-MiniLM-L6-v2
```

---------

Signed-off-by: Ben Browning <bbrownin@redhat.com>
Ben Browning, 2025-06-13 14:32:48 -04:00 · commit 941f505eb0 (parent 554ada57b0) · 28 changed files with 1105 additions and 24 deletions


@@ -3240,6 +3240,59 @@
}
}
},
"/v1/openai/v1/vector_stores/{vector_store_id}/files": {
"post": {
"responses": {
"200": {
"description": "A VectorStoreFileObject representing the attached file.",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/VectorStoreFileObject"
}
}
}
},
"400": {
"$ref": "#/components/responses/BadRequest400"
},
"429": {
"$ref": "#/components/responses/TooManyRequests429"
},
"500": {
"$ref": "#/components/responses/InternalServerError500"
},
"default": {
"$ref": "#/components/responses/DefaultError"
}
},
"tags": [
"VectorIO"
],
"description": "Attach a file to a vector store.",
"parameters": [
{
"name": "vector_store_id",
"in": "path",
"description": "The ID of the vector store to attach the file to.",
"required": true,
"schema": {
"type": "string"
}
}
],
"requestBody": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/OpenaiAttachFileToVectorStoreRequest"
}
}
},
"required": true
}
}
},
"/v1/openai/v1/completions": {
"post": {
"responses": {
@@ -7047,6 +7100,9 @@
{
"$ref": "#/components/schemas/OpenAIResponseOutputMessageWebSearchToolCall"
},
{
"$ref": "#/components/schemas/OpenAIResponseOutputMessageFileSearchToolCall"
},
{
"$ref": "#/components/schemas/OpenAIResponseOutputMessageFunctionToolCall"
},
@@ -7193,12 +7249,41 @@
"const": "file_search",
"default": "file_search"
},
"vector_store_id": {
"vector_store_ids": {
"type": "array",
"items": {
"type": "string"
}
},
"filters": {
"type": "object",
"additionalProperties": {
"oneOf": [
{
"type": "null"
},
{
"type": "boolean"
},
{
"type": "number"
},
{
"type": "string"
},
{
"type": "array"
},
{
"type": "object"
}
]
}
},
"max_num_results": {
"type": "integer",
"default": 10
},
"ranking_options": {
"type": "object",
"properties": {
@@ -7217,7 +7302,7 @@
"additionalProperties": false,
"required": [
"type",
"vector_store_id"
"vector_store_ids"
],
"title": "OpenAIResponseInputToolFileSearch"
},
@@ -7484,6 +7569,64 @@
],
"title": "OpenAIResponseOutputMessageContentOutputText"
},
"OpenAIResponseOutputMessageFileSearchToolCall": {
"type": "object",
"properties": {
"id": {
"type": "string"
},
"queries": {
"type": "array",
"items": {
"type": "string"
}
},
"status": {
"type": "string"
},
"type": {
"type": "string",
"const": "file_search_call",
"default": "file_search_call"
},
"results": {
"type": "array",
"items": {
"type": "object",
"additionalProperties": {
"oneOf": [
{
"type": "null"
},
{
"type": "boolean"
},
{
"type": "number"
},
{
"type": "string"
},
{
"type": "array"
},
{
"type": "object"
}
]
}
}
}
},
"additionalProperties": false,
"required": [
"id",
"queries",
"status",
"type"
],
"title": "OpenAIResponseOutputMessageFileSearchToolCall"
},
"OpenAIResponseOutputMessageFunctionToolCall": {
"type": "object",
"properties": {
@@ -7760,6 +7903,9 @@
{
"$ref": "#/components/schemas/OpenAIResponseOutputMessageWebSearchToolCall"
},
{
"$ref": "#/components/schemas/OpenAIResponseOutputMessageFileSearchToolCall"
},
{
"$ref": "#/components/schemas/OpenAIResponseOutputMessageFunctionToolCall"
},
@@ -7775,6 +7921,7 @@
"mapping": {
"message": "#/components/schemas/OpenAIResponseMessage",
"web_search_call": "#/components/schemas/OpenAIResponseOutputMessageWebSearchToolCall",
"file_search_call": "#/components/schemas/OpenAIResponseOutputMessageFileSearchToolCall",
"function_call": "#/components/schemas/OpenAIResponseOutputMessageFunctionToolCall",
"mcp_call": "#/components/schemas/OpenAIResponseOutputMessageMCPCall",
"mcp_list_tools": "#/components/schemas/OpenAIResponseOutputMessageMCPListTools"
@@ -11766,6 +11913,232 @@
],
"title": "LogEventRequest"
},
"VectorStoreChunkingStrategy": {
"oneOf": [
{
"$ref": "#/components/schemas/VectorStoreChunkingStrategyAuto"
},
{
"$ref": "#/components/schemas/VectorStoreChunkingStrategyStatic"
}
],
"discriminator": {
"propertyName": "type",
"mapping": {
"auto": "#/components/schemas/VectorStoreChunkingStrategyAuto",
"static": "#/components/schemas/VectorStoreChunkingStrategyStatic"
}
}
},
"VectorStoreChunkingStrategyAuto": {
"type": "object",
"properties": {
"type": {
"type": "string",
"const": "auto",
"default": "auto"
}
},
"additionalProperties": false,
"required": [
"type"
],
"title": "VectorStoreChunkingStrategyAuto"
},
"VectorStoreChunkingStrategyStatic": {
"type": "object",
"properties": {
"type": {
"type": "string",
"const": "static",
"default": "static"
},
"static": {
"$ref": "#/components/schemas/VectorStoreChunkingStrategyStaticConfig"
}
},
"additionalProperties": false,
"required": [
"type",
"static"
],
"title": "VectorStoreChunkingStrategyStatic"
},
"VectorStoreChunkingStrategyStaticConfig": {
"type": "object",
"properties": {
"chunk_overlap_tokens": {
"type": "integer",
"default": 400
},
"max_chunk_size_tokens": {
"type": "integer",
"default": 800
}
},
"additionalProperties": false,
"required": [
"chunk_overlap_tokens",
"max_chunk_size_tokens"
],
"title": "VectorStoreChunkingStrategyStaticConfig"
},
"OpenaiAttachFileToVectorStoreRequest": {
"type": "object",
"properties": {
"file_id": {
"type": "string",
"description": "The ID of the file to attach to the vector store."
},
"attributes": {
"type": "object",
"additionalProperties": {
"oneOf": [
{
"type": "null"
},
{
"type": "boolean"
},
{
"type": "number"
},
{
"type": "string"
},
{
"type": "array"
},
{
"type": "object"
}
]
},
"description": "The key-value attributes stored with the file, which can be used for filtering."
},
"chunking_strategy": {
"$ref": "#/components/schemas/VectorStoreChunkingStrategy",
"description": "The chunking strategy to use for the file."
}
},
"additionalProperties": false,
"required": [
"file_id"
],
"title": "OpenaiAttachFileToVectorStoreRequest"
},
"VectorStoreFileLastError": {
"type": "object",
"properties": {
"code": {
"oneOf": [
{
"type": "string",
"const": "server_error"
},
{
"type": "string",
"const": "rate_limit_exceeded"
}
]
},
"message": {
"type": "string"
}
},
"additionalProperties": false,
"required": [
"code",
"message"
],
"title": "VectorStoreFileLastError"
},
"VectorStoreFileObject": {
"type": "object",
"properties": {
"id": {
"type": "string"
},
"object": {
"type": "string",
"default": "vector_store.file"
},
"attributes": {
"type": "object",
"additionalProperties": {
"oneOf": [
{
"type": "null"
},
{
"type": "boolean"
},
{
"type": "number"
},
{
"type": "string"
},
{
"type": "array"
},
{
"type": "object"
}
]
}
},
"chunking_strategy": {
"$ref": "#/components/schemas/VectorStoreChunkingStrategy"
},
"created_at": {
"type": "integer"
},
"last_error": {
"$ref": "#/components/schemas/VectorStoreFileLastError"
},
"status": {
"oneOf": [
{
"type": "string",
"const": "completed"
},
{
"type": "string",
"const": "in_progress"
},
{
"type": "string",
"const": "cancelled"
},
{
"type": "string",
"const": "failed"
}
]
},
"usage_bytes": {
"type": "integer",
"default": 0
},
"vector_store_id": {
"type": "string"
}
},
"additionalProperties": false,
"required": [
"id",
"object",
"attributes",
"chunking_strategy",
"created_at",
"status",
"usage_bytes",
"vector_store_id"
],
"title": "VectorStoreFileObject",
"description": "OpenAI Vector Store File object."
},
"OpenAIJSONSchema": {
"type": "object",
"properties": {


@@ -2263,6 +2263,43 @@ paths:
schema:
$ref: '#/components/schemas/LogEventRequest'
required: true
/v1/openai/v1/vector_stores/{vector_store_id}/files:
post:
responses:
'200':
description: >-
A VectorStoreFileObject representing the attached file.
content:
application/json:
schema:
$ref: '#/components/schemas/VectorStoreFileObject'
'400':
$ref: '#/components/responses/BadRequest400'
'429':
$ref: >-
#/components/responses/TooManyRequests429
'500':
$ref: >-
#/components/responses/InternalServerError500
default:
$ref: '#/components/responses/DefaultError'
tags:
- VectorIO
description: Attach a file to a vector store.
parameters:
- name: vector_store_id
in: path
description: >-
The ID of the vector store to attach the file to.
required: true
schema:
type: string
requestBody:
content:
application/json:
schema:
$ref: '#/components/schemas/OpenaiAttachFileToVectorStoreRequest'
required: true
/v1/openai/v1/completions:
post:
responses:
@@ -5021,6 +5058,7 @@ components:
OpenAIResponseInput:
oneOf:
- $ref: '#/components/schemas/OpenAIResponseOutputMessageWebSearchToolCall'
- $ref: '#/components/schemas/OpenAIResponseOutputMessageFileSearchToolCall'
- $ref: '#/components/schemas/OpenAIResponseOutputMessageFunctionToolCall'
- $ref: '#/components/schemas/OpenAIResponseInputFunctionToolCallOutput'
- $ref: '#/components/schemas/OpenAIResponseMessage'
@@ -5115,10 +5153,23 @@ components:
type: string
const: file_search
default: file_search
vector_store_id:
vector_store_ids:
type: array
items:
type: string
filters:
type: object
additionalProperties:
oneOf:
- type: 'null'
- type: boolean
- type: number
- type: string
- type: array
- type: object
max_num_results:
type: integer
default: 10
ranking_options:
type: object
properties:
@@ -5132,7 +5183,7 @@ components:
additionalProperties: false
required:
- type
- vector_store_id
- vector_store_ids
title: OpenAIResponseInputToolFileSearch
OpenAIResponseInputToolFunction:
type: object
@@ -5294,6 +5345,41 @@ components:
- type
title: >-
OpenAIResponseOutputMessageContentOutputText
"OpenAIResponseOutputMessageFileSearchToolCall":
type: object
properties:
id:
type: string
queries:
type: array
items:
type: string
status:
type: string
type:
type: string
const: file_search_call
default: file_search_call
results:
type: array
items:
type: object
additionalProperties:
oneOf:
- type: 'null'
- type: boolean
- type: number
- type: string
- type: array
- type: object
additionalProperties: false
required:
- id
- queries
- status
- type
title: >-
OpenAIResponseOutputMessageFileSearchToolCall
"OpenAIResponseOutputMessageFunctionToolCall":
type: object
properties:
@@ -5491,6 +5577,7 @@ components:
oneOf:
- $ref: '#/components/schemas/OpenAIResponseMessage'
- $ref: '#/components/schemas/OpenAIResponseOutputMessageWebSearchToolCall'
- $ref: '#/components/schemas/OpenAIResponseOutputMessageFileSearchToolCall'
- $ref: '#/components/schemas/OpenAIResponseOutputMessageFunctionToolCall'
- $ref: '#/components/schemas/OpenAIResponseOutputMessageMCPCall'
- $ref: '#/components/schemas/OpenAIResponseOutputMessageMCPListTools'
@@ -5499,6 +5586,7 @@ components:
mapping:
message: '#/components/schemas/OpenAIResponseMessage'
web_search_call: '#/components/schemas/OpenAIResponseOutputMessageWebSearchToolCall'
file_search_call: '#/components/schemas/OpenAIResponseOutputMessageFileSearchToolCall'
function_call: '#/components/schemas/OpenAIResponseOutputMessageFunctionToolCall'
mcp_call: '#/components/schemas/OpenAIResponseOutputMessageMCPCall'
mcp_list_tools: '#/components/schemas/OpenAIResponseOutputMessageMCPListTools'
@@ -8251,6 +8339,148 @@ components:
- event
- ttl_seconds
title: LogEventRequest
VectorStoreChunkingStrategy:
oneOf:
- $ref: '#/components/schemas/VectorStoreChunkingStrategyAuto'
- $ref: '#/components/schemas/VectorStoreChunkingStrategyStatic'
discriminator:
propertyName: type
mapping:
auto: '#/components/schemas/VectorStoreChunkingStrategyAuto'
static: '#/components/schemas/VectorStoreChunkingStrategyStatic'
VectorStoreChunkingStrategyAuto:
type: object
properties:
type:
type: string
const: auto
default: auto
additionalProperties: false
required:
- type
title: VectorStoreChunkingStrategyAuto
VectorStoreChunkingStrategyStatic:
type: object
properties:
type:
type: string
const: static
default: static
static:
$ref: '#/components/schemas/VectorStoreChunkingStrategyStaticConfig'
additionalProperties: false
required:
- type
- static
title: VectorStoreChunkingStrategyStatic
VectorStoreChunkingStrategyStaticConfig:
type: object
properties:
chunk_overlap_tokens:
type: integer
default: 400
max_chunk_size_tokens:
type: integer
default: 800
additionalProperties: false
required:
- chunk_overlap_tokens
- max_chunk_size_tokens
title: VectorStoreChunkingStrategyStaticConfig
OpenaiAttachFileToVectorStoreRequest:
type: object
properties:
file_id:
type: string
description: >-
The ID of the file to attach to the vector store.
attributes:
type: object
additionalProperties:
oneOf:
- type: 'null'
- type: boolean
- type: number
- type: string
- type: array
- type: object
description: >-
The key-value attributes stored with the file, which can be used for filtering.
chunking_strategy:
$ref: '#/components/schemas/VectorStoreChunkingStrategy'
description: >-
The chunking strategy to use for the file.
additionalProperties: false
required:
- file_id
title: OpenaiAttachFileToVectorStoreRequest
VectorStoreFileLastError:
type: object
properties:
code:
oneOf:
- type: string
const: server_error
- type: string
const: rate_limit_exceeded
message:
type: string
additionalProperties: false
required:
- code
- message
title: VectorStoreFileLastError
VectorStoreFileObject:
type: object
properties:
id:
type: string
object:
type: string
default: vector_store.file
attributes:
type: object
additionalProperties:
oneOf:
- type: 'null'
- type: boolean
- type: number
- type: string
- type: array
- type: object
chunking_strategy:
$ref: '#/components/schemas/VectorStoreChunkingStrategy'
created_at:
type: integer
last_error:
$ref: '#/components/schemas/VectorStoreFileLastError'
status:
oneOf:
- type: string
const: completed
- type: string
const: in_progress
- type: string
const: cancelled
- type: string
const: failed
usage_bytes:
type: integer
default: 0
vector_store_id:
type: string
additionalProperties: false
required:
- id
- object
- attributes
- chunking_strategy
- created_at
- status
- usage_bytes
- vector_store_id
title: VectorStoreFileObject
description: OpenAI Vector Store File object.
OpenAIJSONSchema:
type: object
properties:


@@ -18,6 +18,7 @@ The `llamastack/distribution-ollama` distribution consists of the following prov
| agents | `inline::meta-reference` |
| datasetio | `remote::huggingface`, `inline::localfs` |
| eval | `inline::meta-reference` |
| files | `inline::localfs` |
| inference | `remote::ollama` |
| post_training | `inline::huggingface` |
| safety | `inline::llama-guard` |