This commit is contained in:
Aidan Do 2024-11-26 10:29:46 +00:00
parent 5a845cebfd
commit f6f3f3c792

21
1.py
View file

@@ -8,8 +8,16 @@ import os
from llama_stack_client import LlamaStackClient from llama_stack_client import LlamaStackClient
from pydantic import BaseModel
client = LlamaStackClient(base_url=f"http://localhost:{os.environ['LLAMA_STACK_PORT']}") client = LlamaStackClient(base_url=f"http://localhost:{os.environ['LLAMA_STACK_PORT']}")
class CompletionMessage(BaseModel):
content: str
additional_info: str
response = client.inference.chat_completion( response = client.inference.chat_completion(
model_id=os.environ["INFERENCE_MODEL"], model_id=os.environ["INFERENCE_MODEL"],
messages=[ messages=[
@@ -18,18 +26,7 @@ response = client.inference.chat_completion(
], ],
response_format={ response_format={
"type": "json_schema", "type": "json_schema",
"json_schema": { "json_schema": CompletionMessage.model_json_schema(),
"type": "object",
"properties": {
"completion_message": {
"type": "object",
"properties": {
"content": {"type": "string"},
"additional_info": {"type": "string"},
},
}
},
},
}, },
) )
print(response.completion_message.content) print(response.completion_message.content)