refactor: Remove double filtering based on score threshold

This commit is contained in:
Varsha Prasad Narsing 2025-08-01 15:56:55 -07:00
parent 140ee7d337
commit 76ff6a5943
4 changed files with 13 additions and 7 deletions

View file

@@ -160,8 +160,11 @@ class FaissIndex(EmbeddingIndex):
for d, i in zip(distances[0], indices[0], strict=False):
if i < 0:
continue
score = 1.0 / float(d) if d != 0 else float("inf")
if score < score_threshold:
continue
chunks.append(self.chunk_by_index[int(i)])
scores.append(1.0 / float(d) if d != 0 else float("inf"))
scores.append(score)
return QueryChunksResponse(chunks=chunks, scores=scores)

View file

@@ -132,8 +132,11 @@ class PGVectorIndex(EmbeddingIndex):
chunks = []
scores = []
for doc, dist in results:
score = 1.0 / float(dist) if dist != 0 else float("inf")
if score < score_threshold:
continue
chunks.append(Chunk(**doc))
scores.append(1.0 / float(dist) if dist != 0 else float("inf"))
scores.append(score)
return QueryChunksResponse(chunks=chunks, scores=scores)

View file

@@ -105,8 +105,12 @@ class WeaviateIndex(EmbeddingIndex):
log.exception(f"Failed to parse document: {chunk_json}")
continue
score = 1.0 / doc.metadata.distance if doc.metadata.distance != 0 else float("inf")
if score < score_threshold:
continue
chunks.append(chunk)
scores.append(1.0 / doc.metadata.distance if doc.metadata.distance != 0 else float("inf"))
scores.append(score)
return QueryChunksResponse(chunks=chunks, scores=scores)

View file

@@ -433,10 +433,6 @@ class OpenAIVectorStoreMixin(ABC):
# Convert response to OpenAI format
data = []
for chunk, score in zip(response.chunks, response.scores, strict=False):
# Apply score based filtering
if score < score_threshold:
continue
# Apply filters if provided
if filters:
# Simple metadata filtering