fix: fixed import error (#1637)

# What does this PR do?
`generate_response_prompt` had an import error: it still imported `Llama` from `meta_reference.generation`, which no longer resolves. This PR updates it to import `Llama3` from `meta_reference.llama3.generation` and to call `Llama3.build` accordingly.

Co-authored-by: sarthakdeshpande <sarthak.deshpande@engati.com>

@@ -18,7 +18,7 @@ import fire
 from llama_stack.models.llama.sku_list import resolve_model
 from llama_stack.providers.inline.inference.meta_reference.config import MetaReferenceInferenceConfig
-from llama_stack.providers.inline.inference.meta_reference.generation import Llama
+from llama_stack.providers.inline.inference.meta_reference.llama3.generation import Llama3
 THIS_DIR = Path(__file__).parent.resolve()
@@ -41,7 +41,7 @@ def run_main(
     llama_model = resolve_model(model_id)
     if not llama_model:
         raise ValueError(f"Model {model_id} not found")
-    generator = Llama.build(
+    generator = Llama3.build(
         config=config,
         model_id=model_id,
         llama_model=llama_model,
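
For reference, the corrected import path and call site can be exercised roughly as in the sketch below. This is a minimal illustration, not the actual script: `build_generator` is a hypothetical helper mirroring the `run_main` logic shown in the diff, and how the `MetaReferenceInferenceConfig` is constructed is assumed to happen elsewhere.

```python
from llama_stack.models.llama.sku_list import resolve_model
from llama_stack.providers.inline.inference.meta_reference.config import (
    MetaReferenceInferenceConfig,
)
# Fixed import: the generator class is Llama3, under llama3.generation.
from llama_stack.providers.inline.inference.meta_reference.llama3.generation import Llama3


def build_generator(config: MetaReferenceInferenceConfig, model_id: str):
    """Hypothetical helper mirroring run_main: resolve the model, then build the generator."""
    llama_model = resolve_model(model_id)
    if not llama_model:
        raise ValueError(f"Model {model_id} not found")
    # Fixed call site: Llama.build(...) becomes Llama3.build(...).
    return Llama3.build(
        config=config,
        model_id=model_id,
        llama_model=llama_model,
    )
```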