Responses file_search: wire up additional params
This adds passing of max_num_results from the file_search tool call into the knowledge_search tool, and logs warnings if the filters or ranking_options params are used, since those are not wired up yet. It also adds the API surface for filters and ranking options, so we don't have to regenerate clients again once those are wired up.

Signed-off-by: Ben Browning <bbrownin@redhat.com>
This commit is contained in:
parent
788d34d8b4
commit
8a5ea57253
4 changed files with 53 additions and 2 deletions
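For orientation, a file_search tool block in a Responses API request might look like the following after this change. The values are illustrative, the ranking_options keys are assumptions (the diff below only shows that a FileSearchRankingOptions model exists), and per the commit message only max_num_results is actually honored so far:

tool = {
    "type": "file_search",
    "vector_store_ids": ["vs_123"],               # example store id
    "max_num_results": 5,                         # passed through to knowledge_search
    "filters": {"author": "example"},             # accepted; warns, not applied yet
    "ranking_options": {"score_threshold": 0.5},  # accepted; warns, not applied yet
}

The API-surface change itself: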
@@ -409,8 +409,9 @@ class FileSearchRankingOptions(BaseModel):
 class OpenAIResponseInputToolFileSearch(BaseModel):
     type: Literal["file_search"] = "file_search"
     vector_store_ids: list[str]
+    filters: dict[str, Any] | None = None
     max_num_results: int | None = Field(default=10, ge=1, le=50)
+    ranking_options: FileSearchRankingOptions | None = None
-    # TODO: add filters, max_num_results


 class ApprovalFilter(BaseModel):
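As a rough, self-contained sketch of the pass-through wiring the commit message describes: the FileSearchRankingOptions fields and the helper build_knowledge_search_args, including its parameter names and warning text, are assumptions for illustration, not the actual llama-stack code.

import logging
from typing import Any, Literal

from pydantic import BaseModel, Field

logger = logging.getLogger(__name__)


class FileSearchRankingOptions(BaseModel):
    # Assumed fields, modeled on OpenAI's file_search ranking options.
    ranker: str | None = None
    score_threshold: float | None = Field(default=0.0, ge=0.0, le=1.0)


class OpenAIResponseInputToolFileSearch(BaseModel):
    # API surface after this commit: filters and ranking_options are
    # accepted but not yet honored by the search backend.
    type: Literal["file_search"] = "file_search"
    vector_store_ids: list[str]
    filters: dict[str, Any] | None = None
    max_num_results: int | None = Field(default=10, ge=1, le=50)
    ranking_options: FileSearchRankingOptions | None = None


def build_knowledge_search_args(
    tool: OpenAIResponseInputToolFileSearch, query: str
) -> dict[str, Any]:
    # Hypothetical helper: pass max_num_results through to the
    # knowledge_search tool, and warn when filters or ranking_options are
    # supplied, since those are not wired up yet.
    if tool.filters:
        logger.warning("file_search filters are not yet supported; ignoring")
    if tool.ranking_options:
        logger.warning("file_search ranking_options are not yet supported; ignoring")
    return {
        "query": query,
        "vector_db_ids": tool.vector_store_ids,
        "max_num_results": tool.max_num_results,
    }

Declaring filters and ranking_options on the model now, even though they only produce warnings, means clients generated from this API surface will not need to be regenerated when a later change actually honors them.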