From f6f3f3c792335bf0506e9c71a8a59dfb7053269c Mon Sep 17 00:00:00 2001 From: Aidan Do Date: Tue, 26 Nov 2024 10:29:46 +0000 Subject: [PATCH] Generate response_format JSON schema from a pydantic model --- 1.py | 21 +++++++++------------ 1 file changed, 9 insertions(+), 12 deletions(-) diff --git a/1.py b/1.py index 86c09d627..d63e91883 100644 --- a/1.py +++ b/1.py @@ -8,8 +8,16 @@ import os from llama_stack_client import LlamaStackClient +from pydantic import BaseModel + client = LlamaStackClient(base_url=f"http://localhost:{os.environ['LLAMA_STACK_PORT']}") + +class CompletionMessage(BaseModel): + content: str + additional_info: str + + response = client.inference.chat_completion( model_id=os.environ["INFERENCE_MODEL"], messages=[ @@ -18,18 +26,7 @@ response = client.inference.chat_completion( ], response_format={ "type": "json_schema", - "json_schema": { - "type": "object", - "properties": { - "completion_message": { - "type": "object", - "properties": { - "content": {"type": "string"}, - "additional_info": {"type": "string"}, - }, - } - }, - }, + "json_schema": CompletionMessage.model_json_schema(), }, ) print(response.completion_message.content)