[feat]: Add search_mode support to OpenAI vector store API
Add a search_mode parameter (vector/keyword/hybrid) to the openai_search_vector_store method. Works around an OpenAPI code-generation limitation by typing the parameter as str instead of a Literal.

Signed-off-by: Varsha Prasad Narsing <varshaprasad96@gmail.com>
parent 114946ae88
commit 0fa64ac762
9 changed files with 42 additions and 4 deletions
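For context, the commit applies a common pattern: keep the parameter typed as a plain str so the OpenAPI schema generator can handle it, and enforce the allowed values at runtime instead of in the type annotation. A minimal sketch of that pattern, with illustrative names that are not taken from llama-stack:

    # Sketch of the str-plus-runtime-validation pattern used by this commit.
    # VALID_SEARCH_MODES and normalize_search_mode are illustrative names only.
    VALID_SEARCH_MODES = {"keyword", "vector", "hybrid"}

    def normalize_search_mode(search_mode: str | None) -> str:
        """Default to 'vector' and reject values outside the allowed set."""
        mode = search_mode or "vector"
        if mode not in VALID_SEARCH_MODES:
            raise ValueError(f"search_mode must be one of {VALID_SEARCH_MODES}, got {mode}")
        return mode

The relevant hunk from the commit: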
@@ -337,13 +337,16 @@ class OpenAIVectorStoreMixin(ABC):
        max_num_results: int | None = 10,
        ranking_options: SearchRankingOptions | None = None,
        rewrite_query: bool | None = False,
        # search_mode: Literal["keyword", "vector", "hybrid"] = "vector",
        search_mode: str | None = "vector",  # Using str instead of Literal due to OpenAPI schema generator limitations
    ) -> VectorStoreSearchResponsePage:
        """Search for chunks in a vector store."""
        # TODO: Add support in the API for this
        search_mode = "vector"
        max_num_results = max_num_results or 10

        # Validate search_mode
        valid_modes = {"keyword", "vector", "hybrid"}
        if search_mode not in valid_modes:
            raise ValueError(f"search_mode must be one of {valid_modes}, got {search_mode}")

        if vector_store_id not in self.openai_vector_stores:
            raise ValueError(f"Vector store {vector_store_id} not found")
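A hedged usage sketch: it assumes the method is asynchronous and that it also accepts vector_store_id and query arguments, neither of which is visible in this hunk; only search_mode, max_num_results, ranking_options, and rewrite_query appear above.

    # Hypothetical usage against a provider that implements OpenAIVectorStoreMixin.
    # The provider object, the vector_store_id value, and the query argument are
    # illustrative assumptions, not taken from the diff.

    async def search_keyword(provider, vector_store_id: str):
        # Returns a VectorStoreSearchResponsePage, per the annotation in the diff.
        return await provider.openai_search_vector_store(
            vector_store_id=vector_store_id,
            query="What is llama-stack?",   # assumed parameter, not shown in this hunk
            search_mode="keyword",          # per this commit: "keyword", "vector", or "hybrid"
            max_num_results=5,
        )

Note that, per the TODO in the hunk, the shown code still forces search_mode to "vector" before validating, so a caller-supplied mode is accepted by the signature but does not yet change search behavior.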