Mirror of https://github.com/meta-llama/llama-stack.git, synced 2025-07-20 03:40:05 +00:00.
chore: Added openai compatible vector io endpoints for chromadb
This commit is contained in:
parent
6fde601765
commit
e9c443a91f
4 changed files with 127 additions and 95 deletions
@@ -22,7 +22,7 @@ logger = logging.getLogger(__name__)
def skip_if_provider_doesnt_support_openai_vector_stores(client_with_models):
    """Skip the current test unless a configured vector_io provider supports OpenAI vector stores.

    Scans the providers registered on the client; if any vector_io provider is one of the
    known OpenAI-vector-store-capable inline providers, the test proceeds. Otherwise the
    test is skipped via ``pytest.skip``.
    """
    # Inline providers known to implement the OpenAI-compatible vector store endpoints.
    supported_provider_types = {"inline::faiss", "inline::sqlite-vec", "inline::chromadb"}
    for provider in client_with_models.providers.list():
        if provider.api == "vector_io" and provider.provider_type in supported_provider_types:
            return
    pytest.skip("OpenAI vector stores are not supported by any provider")
(End of diff hunk.)
Add table
Add a link
Reference in a new issue