Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-12-03 18:00:36 +00:00)
undoing formatting and updating missed expansion parameters
Signed-off-by: Francisco Javier Arceo <farceo@redhat.com>
parent 88ce118ba3
commit 2ebc56c3d9
13 changed files with 23 additions and 61 deletions
@@ -1237,7 +1237,7 @@ async def test_query_expansion_functionality(vector_io_adapter):
     from unittest.mock import MagicMock
 
     from llama_stack.core.datatypes import QualifiedModel, RewriteQueryParams
-    from llama_stack.providers.utils.memory.constants import DEFAULT_QUERY_EXPANSION_PROMPT
+    from llama_stack.providers.utils.memory.constants import DEFAULT_QUERY_REWRITE_PROMPT
     from llama_stack.providers.utils.memory.rewrite_query_config import set_default_rewrite_query_config
     from llama_stack.providers.utils.memory.vector_store import VectorStoreWithIndex
     from llama_stack_api import QueryChunksResponse
@@ -1288,7 +1288,7 @@ async def test_query_expansion_functionality(vector_io_adapter):
 
     # Verify default prompt is used (contains our built-in prompt text)
     prompt_text = chat_call_args.messages[0].content
-    expected_prompt = DEFAULT_QUERY_EXPANSION_PROMPT.format(query="test query")
+    expected_prompt = DEFAULT_QUERY_REWRITE_PROMPT.format(query="test query")
     assert prompt_text == expected_prompt
 
     # Verify default inference parameters are used
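For context, the renamed constant is consumed exactly as the hunk above expects. A minimal sketch of that expectation, assuming DEFAULT_QUERY_REWRITE_PROMPT is a plain format string with a {query} placeholder (the helper name build_rewrite_prompt is hypothetical, introduced only for illustration):

    from llama_stack.providers.utils.memory.constants import DEFAULT_QUERY_REWRITE_PROMPT

    def build_rewrite_prompt(query: str) -> str:
        # Fill the built-in rewrite prompt template with the user's search query.
        # Assumption: the constant is a str containing a "{query}" placeholder,
        # as implied by the .format(query=...) call in the diff above.
        return DEFAULT_QUERY_REWRITE_PROMPT.format(query=query)

    expected = build_rewrite_prompt("test query")
    # The test asserts that the first chat message sent during query rewriting
    # equals this expected prompt text.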