set defaults so that Stainless does not pick them up as required params

Hardik Shah 2025-06-11 17:19:33 -07:00
parent d55100d9b7
commit 764026c0d6
8 changed files with 41 additions and 41 deletions

View file

@@ -193,8 +193,8 @@ class VectorIO(Protocol):
     @webmethod(route="/openai/v1/vector_stores", method="GET")
     async def openai_list_vector_stores(
         self,
-        limit: int = 20,
-        order: str = "desc",
+        limit: int | None = 20,
+        order: str | None = "desc",
         after: str | None = None,
         before: str | None = None,
     ) -> VectorStoreListResponse:
@@ -256,9 +256,9 @@ class VectorIO(Protocol):
         vector_store_id: str,
         query: str | list[str],
         filters: dict[str, Any] | None = None,
-        max_num_results: int = 10,
+        max_num_results: int | None = None,
         ranking_options: dict[str, Any] | None = None,
-        rewrite_query: bool = False,
+        rewrite_query: bool | None = None,
     ) -> VectorStoreSearchResponse:
         """Search for chunks in a vector store.

View file

@@ -151,8 +151,8 @@ class VectorIORouter(VectorIO):
     async def openai_list_vector_stores(
         self,
-        limit: int = 20,
-        order: str = "desc",
+        limit: int | None = 20,
+        order: str | None = "desc",
         after: str | None = None,
         before: str | None = None,
     ) -> VectorStoreListResponse:
@@ -239,9 +239,9 @@ class VectorIORouter(VectorIO):
         vector_store_id: str,
         query: str | list[str],
         filters: dict[str, Any] | None = None,
-        max_num_results: int = 10,
+        max_num_results: int | None = 10,
         ranking_options: dict[str, Any] | None = None,
-        rewrite_query: bool = False,
+        rewrite_query: bool | None = False,
     ) -> VectorStoreSearchResponse:
         logger.debug(f"VectorIORouter.openai_search_vector_store: {vector_store_id}")
         # Route based on vector store ID
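The router body is truncated here; below is a hedged sketch of the delegation it describes. The routing_table and get_provider_impl names are assumptions for illustration, not necessarily the real llama_stack API.

from typing import Any


class VectorIORouterSketch:
    # Hypothetical sketch of routing by vector store ID; attribute and method
    # names are assumptions, not the actual implementation.
    def __init__(self, routing_table: Any) -> None:
        self.routing_table = routing_table

    async def openai_search_vector_store(
        self, vector_store_id: str, query: str | list[str], **kwargs: Any
    ) -> Any:
        # Resolve which provider backend (Chroma, Milvus, Qdrant, ...) owns
        # this store, then forward the call with the same optional parameters.
        provider = await self.routing_table.get_provider_impl(vector_store_id)
        return await provider.openai_search_vector_store(vector_store_id, query, **kwargs)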

View file

@@ -6,7 +6,7 @@
 import asyncio
 import json
 import logging
-from typing import Any, Literal
+from typing import Any
 from urllib.parse import urlparse
 
 import chromadb
@@ -203,8 +203,8 @@ class ChromaVectorIOAdapter(VectorIO, VectorDBsProtocolPrivate):
     async def openai_list_vector_stores(
         self,
-        limit: int = 20,
-        order: str = "desc",
+        limit: int | None = 20,
+        order: str | None = "desc",
         after: str | None = None,
         before: str | None = None,
     ) -> VectorStoreListResponse:
@@ -236,9 +236,8 @@ class ChromaVectorIOAdapter(VectorIO, VectorDBsProtocolPrivate):
         vector_store_id: str,
         query: str | list[str],
         filters: dict[str, Any] | None = None,
-        max_num_results: int = 10,
+        max_num_results: int | None = 10,
         ranking_options: dict[str, Any] | None = None,
-        rewrite_query: bool = False,
-        search_mode: Literal["keyword", "vector", "hybrid"] = "vector",
+        rewrite_query: bool | None = False,
     ) -> VectorStoreSearchResponse:
         raise NotImplementedError("OpenAI Vector Stores API is not supported in Chroma")

View file

@@ -9,7 +9,7 @@ import hashlib
 import logging
 import os
 import uuid
-from typing import Any, Literal
+from typing import Any
 
 from numpy.typing import NDArray
 from pymilvus import MilvusClient
@@ -201,8 +201,8 @@ class MilvusVectorIOAdapter(VectorIO, VectorDBsProtocolPrivate):
     async def openai_list_vector_stores(
         self,
-        limit: int = 20,
-        order: str = "desc",
+        limit: int | None = 20,
+        order: str | None = "desc",
         after: str | None = None,
         before: str | None = None,
     ) -> VectorStoreListResponse:
@@ -234,10 +234,9 @@ class MilvusVectorIOAdapter(VectorIO, VectorDBsProtocolPrivate):
         vector_store_id: str,
         query: str | list[str],
         filters: dict[str, Any] | None = None,
-        max_num_results: int = 10,
+        max_num_results: int | None = 10,
         ranking_options: dict[str, Any] | None = None,
-        rewrite_query: bool = False,
-        search_mode: Literal["keyword", "vector", "hybrid"] = "vector",
+        rewrite_query: bool | None = False,
     ) -> VectorStoreSearchResponse:
         raise NotImplementedError("OpenAI Vector Stores API is not supported in Qdrant")

View file

@@ -6,7 +6,7 @@
 import logging
 import uuid
-from typing import Any, Literal
+from typing import Any
 
 from numpy.typing import NDArray
 from qdrant_client import AsyncQdrantClient, models
@@ -203,8 +203,8 @@ class QdrantVectorIOAdapter(VectorIO, VectorDBsProtocolPrivate):
     async def openai_list_vector_stores(
         self,
-        limit: int = 20,
-        order: str = "desc",
+        limit: int | None = 20,
+        order: str | None = "desc",
         after: str | None = None,
         before: str | None = None,
     ) -> VectorStoreListResponse:
@@ -236,9 +236,8 @@ class QdrantVectorIOAdapter(VectorIO, VectorDBsProtocolPrivate):
         vector_store_id: str,
         query: str | list[str],
         filters: dict[str, Any] | None = None,
-        max_num_results: int = 10,
+        max_num_results: int | None = 10,
         ranking_options: dict[str, Any] | None = None,
-        rewrite_query: bool = False,
-        search_mode: Literal["keyword", "vector", "hybrid"] = "vector",
+        rewrite_query: bool | None = False,
     ) -> VectorStoreSearchResponse:
         raise NotImplementedError("OpenAI Vector Stores API is not supported in Qdrant")

View file

@@ -8,7 +8,7 @@ import logging
 import time
 import uuid
 from abc import ABC, abstractmethod
-from typing import Any, Literal
+from typing import Any
 
 from llama_stack.apis.vector_dbs import VectorDB
 from llama_stack.apis.vector_io import (
@@ -161,12 +161,15 @@ class OpenAIVectorStoreMixin(ABC):
     async def openai_list_vector_stores(
         self,
-        limit: int = 20,
-        order: str = "desc",
+        limit: int | None = 20,
+        order: str | None = "desc",
         after: str | None = None,
         before: str | None = None,
     ) -> VectorStoreListResponse:
         """Returns a list of vector stores."""
+        limit = limit or 20
+        order = order or "desc"
+
         # Get all vector stores
         all_stores = list(self.openai_vector_stores.values())
@@ -274,12 +277,16 @@ class OpenAIVectorStoreMixin(ABC):
         vector_store_id: str,
         query: str | list[str],
         filters: dict[str, Any] | None = None,
-        max_num_results: int = 10,
+        max_num_results: int | None = 10,
         ranking_options: dict[str, Any] | None = None,
-        rewrite_query: bool = False,
-        search_mode: Literal["keyword", "vector", "hybrid"] = "vector",
+        rewrite_query: bool | None = False,
+        # search_mode: Literal["keyword", "vector", "hybrid"] = "vector",
     ) -> VectorStoreSearchResponse:
         """Search for chunks in a vector store."""
+        # TODO: Add support in the API for this
+        search_mode = "vector"
+        max_num_results = max_num_results or 10
+
         if vector_store_id not in self.openai_vector_stores:
             raise ValueError(f"Vector store {vector_store_id} not found")
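
The mixin resolves the now-nullable parameters back to concrete values with or-coalescing. A small sketch of that idiom follows; note that "x or default" also replaces falsy-but-valid inputs (for example limit=0 would become 20), which an explicit None check avoids if that ever matters:

# Sketch of the default-resolution idiom used in OpenAIVectorStoreMixin.
def resolve_list_defaults(limit: int | None, order: str | None) -> tuple[int, str]:
    # Equivalent in spirit to "limit = limit or 20", but substitutes only on
    # None, so a caller-supplied 0 or "" would be preserved.
    resolved_limit = 20 if limit is None else limit
    resolved_order = "desc" if order is None else order
    return resolved_limit, resolved_order


# A generated SDK that treats the parameters as optional can omit them (send
# None) and still get the documented defaults.
assert resolve_list_defaults(None, None) == (20, "desc")
assert resolve_list_defaults(5, "asc") == (5, "asc")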