From e6fdb9df29de6b1723404de355801e8ec455b8a1 Mon Sep 17 00:00:00 2001
From: Xi Yan
Date: Wed, 18 Sep 2024 08:24:36 -0700
Subject: [PATCH] fix context retriever (#75)

---
 .../impls/meta_reference/agents/rag/context_retriever.py | 8 +++-----
 1 file changed, 3 insertions(+), 5 deletions(-)

diff --git a/llama_stack/providers/impls/meta_reference/agents/rag/context_retriever.py b/llama_stack/providers/impls/meta_reference/agents/rag/context_retriever.py
index 5ebb94a31..57e5d0dee 100644
--- a/llama_stack/providers/impls/meta_reference/agents/rag/context_retriever.py
+++ b/llama_stack/providers/impls/meta_reference/agents/rag/context_retriever.py
@@ -63,11 +63,9 @@ async def llm_rag_query_generator(
     model = config.model
     message = UserMessage(content=content)
     response = inference_api.chat_completion(
-        ChatCompletionRequest(
-            model=model,
-            messages=[message],
-            stream=False,
-        )
+        model=model,
+        messages=[message],
+        stream=False,
     )

     async for chunk in response:
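
For reference, a minimal sketch of how the patched call site reads once the diff above is applied. The names config, content, UserMessage, and inference_api come from the surrounding code in context_retriever.py; the handling of the streamed chunks at the end is an assumption for illustration, not part of the diff.

    # After the fix: chat_completion is called with keyword arguments directly,
    # rather than with a ChatCompletionRequest wrapper object.
    model = config.model
    message = UserMessage(content=content)

    response = inference_api.chat_completion(
        model=model,
        messages=[message],
        stream=False,
    )

    async for chunk in response:
        # Assumption: each chunk carries the generated query text; collect it here.
        ...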