fix: remove sentence-transformers from remote vllm

vLLM itself can perform embeddings generation, so we don't need this
extra provider.

Signed-off-by: Sébastien Han <seb@redhat.com>
This commit is contained in:
Sébastien Han 2025-06-03 18:00:27 +02:00
parent 3c9a10d2fe
commit 7e30b5a466
No known key found for this signature in database
5 changed files with 6 additions and 22 deletions

View file

@@ -26,9 +26,6 @@ providers:
max_tokens: ${env.VLLM_MAX_TOKENS:4096}
api_token: ${env.VLLM_API_TOKEN:fake}
tls_verify: ${env.VLLM_TLS_VERIFY:true}
- provider_id: sentence-transformers
provider_type: inline::sentence-transformers
config: {}
vector_io:
- provider_id: faiss
provider_type: inline::faiss
@@ -133,7 +130,7 @@ models:
- metadata:
embedding_dimension: 384
model_id: all-MiniLM-L6-v2
provider_id: sentence-transformers
provider_id: vllm-inference
model_type: embedding
shields:
- shield_id: ${env.SAFETY_MODEL}