commit 5fba02da29 (parent 85ae899964)
Author: Aidan Do
Date:   2024-11-26 11:06:03 +00:00

1.py

@@ -15,6 +15,7 @@ client = LlamaStackClient(base_url=f"http://localhost:{os.environ['LLAMA_STACK_P
 class CompletionMessage(BaseModel):
     recipe_name: str
+    preamble: str
     ingredients: list[str]
     steps: list[str]
@@ -22,8 +23,14 @@ class CompletionMessage(BaseModel):
 response = client.inference.chat_completion(
     model_id=os.environ["INFERENCE_MODEL"],
     messages=[
-        {"role": "system", "content": "You are a chef."},
-        {"role": "user", "content": "Give me a recipe for spaghetti bolognaise"},
+        {
+            "role": "system",
+            "content": "You are a chef, passionate about educating the world about delicious home cooked meals.",
+        },
+        {
+            "role": "user",
+            "content": "Give me a recipe for spaghetti bolognaise. Start with the recipe name, a preamble describing your childhood stories about spaghetti bolognaise, an ingredients list, and then the recipe steps.",
+        },
     ],
     response_format={
         "type": "json_schema",