From a30b919ae1964b5215fc95a6bddf953b7e275f33 Mon Sep 17 00:00:00 2001
From: Xi Yan
Date: Thu, 19 Sep 2024 12:01:46 -0700
Subject: [PATCH] update inference prompt msg

---
 llama_stack/apis/inference/client.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/llama_stack/apis/inference/client.py b/llama_stack/apis/inference/client.py
index f5321c628..4d67fb4f6 100644
--- a/llama_stack/apis/inference/client.py
+++ b/llama_stack/apis/inference/client.py
@@ -86,7 +86,9 @@ class InferenceClient(Inference):
 
 async def run_main(host: str, port: int, stream: bool):
     client = InferenceClient(f"http://{host}:{port}")
-    message = UserMessage(content="hello world, troll me in two-paragraphs about 42")
+    message = UserMessage(
+        content="hello world, write me a 2 sentence poem about the moon"
+    )
     cprint(f"User>{message.content}", "green")
     iterator = client.chat_completion(
         ChatCompletionRequest(
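
For context, the snippet below sketches how run_main reads once this change is applied. Only the lines visible in the hunk are taken from the patch; the imports, the model id, the remaining ChatCompletionRequest fields, and the output loop are assumptions about the surrounding file rather than the actual llama_stack code.

# Sketch of run_main after this patch. Only the lines shown in the hunk above
# come from the patch; the imports, the model id, the remaining
# ChatCompletionRequest fields, and the output loop are assumptions and may
# not match the real llama_stack client.
import asyncio

from termcolor import cprint

from llama_stack.apis.inference import ChatCompletionRequest, UserMessage  # assumed import path
from llama_stack.apis.inference.client import InferenceClient  # assumed import path


async def run_main(host: str, port: int, stream: bool):
    client = InferenceClient(f"http://{host}:{port}")

    # Prompt text updated by this patch.
    message = UserMessage(
        content="hello world, write me a 2 sentence poem about the moon"
    )
    cprint(f"User>{message.content}", "green")

    iterator = client.chat_completion(
        ChatCompletionRequest(
            model="Llama3.1-8B-Instruct",  # assumed model id, not part of the hunk
            messages=[message],
            stream=stream,
        )
    )
    # Assumed: the client yields streamed response chunks that can be printed.
    async for chunk in iterator:
        print(chunk)


if __name__ == "__main__":
    asyncio.run(run_main("localhost", 5000, stream=True))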