Mirror of https://github.com/meta-llama/llama-stack.git, synced 2025-06-28 02:53:30 +00:00
update chroma interface to match new spec

commit ea110857df
parent 0d0b8d2be1
1 changed file with 6 additions and 2 deletions
@@ -55,7 +55,7 @@ class ChromaIndex(EmbeddingIndex):
             )
         )
 
-    async def query(self, embedding: NDArray, k: int, score_threshold: float) -> QueryChunksResponse:
+    async def query_vector(self, embedding: NDArray, k: int, score_threshold: float) -> QueryChunksResponse:
         results = await maybe_await(
             self.collection.query(
                 query_embeddings=[embedding.tolist()],
@@ -76,8 +76,12 @@ class ChromaIndex(EmbeddingIndex):
                 log.exception(f"Failed to parse document: {doc}")
                 continue
 
+            score = 1.0 / float(dist) if dist != 0 else float("inf")
+            if score < score_threshold:
+                continue
+
             chunks.append(chunk)
-            scores.append(1.0 / float(dist))
+            scores.append(score)
 
         return QueryChunksResponse(chunks=chunks, scores=scores)
 
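To make the change easier to read in context, below is a minimal sketch of what the full query_vector method could look like after this commit. Only the renamed signature, the collection.query call with query_embeddings, and the score/threshold lines come from the diff; the Chunk and QueryChunksResponse stand-ins, the maybe_await helper, the result unpacking, and the n_results/include arguments are assumptions added for illustration and may differ from the actual file.

```python
import inspect
import json
import logging
from dataclasses import dataclass, field

from numpy.typing import NDArray

log = logging.getLogger(__name__)


@dataclass
class Chunk:
    # Minimal stand-in for llama-stack's Chunk type (field names are assumptions).
    content: str
    metadata: dict = field(default_factory=dict)


@dataclass
class QueryChunksResponse:
    # Minimal stand-in for llama-stack's QueryChunksResponse type.
    chunks: list
    scores: list


async def maybe_await(value):
    # Stand-in for llama-stack's maybe_await helper: awaits the value when the
    # Chroma client is async, otherwise returns it unchanged.
    return await value if inspect.isawaitable(value) else value


class ChromaIndex:
    # In the repo this class extends EmbeddingIndex; the base class is omitted here.
    def __init__(self, collection):
        self.collection = collection

    async def query_vector(self, embedding: NDArray, k: int, score_threshold: float) -> QueryChunksResponse:
        # Chroma returns nearest neighbours together with their distances (smaller = closer).
        results = await maybe_await(
            self.collection.query(
                query_embeddings=[embedding.tolist()],
                n_results=k,                         # outside the diff context; assumed
                include=["documents", "distances"],  # outside the diff context; assumed
            )
        )
        documents = results["documents"][0]
        distances = results["distances"][0]

        chunks, scores = [], []
        for dist, doc in zip(distances, documents):
            try:
                chunk = Chunk(**json.loads(doc))  # parsing step assumed from the except block
            except Exception:
                log.exception(f"Failed to parse document: {doc}")
                continue

            # Convert the distance into a similarity-style score; an exact match
            # (distance 0) maps to +inf instead of dividing by zero.
            score = 1.0 / float(dist) if dist != 0 else float("inf")
            if score < score_threshold:
                continue  # drop results below the caller-supplied threshold

            chunks.append(chunk)
            scores.append(score)

        return QueryChunksResponse(chunks=chunks, scores=scores)
```

The net effect of the commit is that results whose converted score falls below the caller-supplied score_threshold are skipped entirely, and the score appended for kept chunks is the same value used in the threshold check, including the +inf case for a zero distance.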