Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-12-28 10:41:59 +00:00)
Reverted the change in handling the case of no chunks found.
commit 57e28b5b9b (parent 82b485b177)
2 changed files with 2 additions and 16 deletions
@@ -129,7 +129,7 @@ class MemoryToolRuntimeImpl(ToolsProtocolPrivate, ToolRuntime, RAGToolRuntime):
         scores = [s for r in results for s in r.scores]
 
         if not chunks:
-            raise ValueError("The knowledge search tool did not find any information relevant to the query.")
+            return RAGQueryResult(content=None)
 
         # sort by score
         chunks, scores = zip(*sorted(zip(chunks, scores, strict=False), key=lambda x: x[1], reverse=True), strict=False)  # type: ignore
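For illustration, below is a minimal, self-contained Python sketch of the behavior this commit reverts to. It is a simplified assumption-based stand-in, not the repository's actual implementation: the RAGQueryResult dataclass and the query() helper are invented for this sketch, and only the no-chunks branch and the sort-by-score step mirror the diff above. With the revert, a query that matches no chunks returns RAGQueryResult(content=None) instead of raising ValueError, so callers inspect content rather than catching an exception.

from dataclasses import dataclass
from typing import Optional


@dataclass
class RAGQueryResult:
    # Simplified stand-in for the library's RAGQueryResult; content is None
    # when nothing relevant was retrieved.
    content: Optional[str] = None


def query(chunks: list[str], scores: list[float]) -> RAGQueryResult:
    # Reverted behavior: an empty retrieval is a normal, empty result, not an error.
    if not chunks:
        return RAGQueryResult(content=None)
    # Sort by score, highest first (mirrors the zip/sorted line in the diff).
    ranked = sorted(zip(chunks, scores), key=lambda pair: pair[1], reverse=True)
    return RAGQueryResult(content="\n".join(chunk for chunk, _ in ranked))


# Caller-side effect of the revert: check `content` instead of catching ValueError.
result = query([], [])
assert result.content is None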