Load OpenAIChatCompletion directly from YAML

No need to iterate through the YAML when it can just be loaded.

Signed-off-by: Derek Higgins <derekh@redhat.com>
This commit is contained in:
parent dfb641a3de
commit 9166baa716

3 changed files with 3 additions and 44 deletions
@@ -9,11 +9,7 @@ import os
 
 import yaml
 
 from llama_stack.apis.inference.inference import (
-    OpenAIAssistantMessageParam,
     OpenAIChatCompletion,
-    OpenAIChatCompletionToolCall,
-    OpenAIChatCompletionToolCallFunction,
-    OpenAIChoice,
 )
 
@@ -32,43 +28,4 @@ def load_chat_completion_fixture(filename: str) -> OpenAIChatCompletion:
 
     with open(fixture_path) as f:
         data = yaml.safe_load(f)
-
-    choices = []
-    for choice_data in data.get("choices", []):
-        message_data = choice_data.get("message", {})
-
-        # Handle tool calls if present
-        tool_calls = None
-        if "tool_calls" in message_data:
-            tool_calls = []
-            for tool_call_data in message_data.get("tool_calls", []):
-                function_data = tool_call_data.get("function", {})
-                function = OpenAIChatCompletionToolCallFunction(
-                    name=function_data.get("name"),
-                    arguments=function_data.get("arguments"),
-                )
-                tool_call = OpenAIChatCompletionToolCall(
-                    id=tool_call_data.get("id"),
-                    type=tool_call_data.get("type"),
-                    function=function,
-                )
-                tool_calls.append(tool_call)
-
-        message = OpenAIAssistantMessageParam(
-            content=message_data.get("content"),
-            tool_calls=tool_calls,
-        )
-
-        choice = OpenAIChoice(
-            message=message,
-            finish_reason=choice_data.get("finish_reason"),
-            index=choice_data.get("index", 0),
-        )
-        choices.append(choice)
-
-    return OpenAIChatCompletion(
-        id=data.get("id"),
-        choices=choices,
-        created=data.get("created"),
-        model=data.get("model"),
-    )
+    return OpenAIChatCompletion(**data)
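
Why the one-liner works (a hedged aside, not part of the commit): OpenAIChatCompletion is a Pydantic model, and Pydantic validates nested dicts into nested models recursively, so the mapping returned by yaml.safe_load can be splatted straight into the constructor. A minimal, self-contained sketch with hypothetical stand-in models (the real types live in llama_stack.apis.inference.inference):

# Sketch only: Message/Choice/ChatCompletion are simplified stand-ins for the
# real llama_stack Pydantic models, not their actual definitions.
import yaml
from pydantic import BaseModel


class Message(BaseModel):
    role: str
    content: str | None = None


class Choice(BaseModel):
    message: Message
    finish_reason: str
    index: int = 0


class ChatCompletion(BaseModel):
    id: str
    choices: list[Choice]
    created: int


FIXTURE = """
id: chat-completion-123
choices:
- message:
    content: "Dublin"
    role: assistant
  finish_reason: stop
  index: 0
created: 1234567890
"""

data = yaml.safe_load(FIXTURE)
# One call replaces the removed ~40 lines: Pydantic builds the nested Choice
# and Message instances from the plain dicts and validates every field.
completion = ChatCompletion(**data)
assert completion.choices[0].message.content == "Dublin"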
@@ -2,6 +2,7 @@ id: chat-completion-123
 choices:
 - message:
     content: "Dublin"
+    role: assistant
   finish_reason: stop
   index: 0
 created: 1234567890
@@ -7,6 +7,7 @@ choices:
      function:
        name: web_search
        arguments: '{"query":"What is the capital of Ireland?"}'
+    role: assistant
   finish_reason: stop
   index: 0
 created: 1234567890
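
Note on the fixture changes (an inference, not stated in the commit): once the YAML is fed to OpenAIChatCompletion directly, each message dict has to satisfy Pydantic validation on its own, and role is presumably what selects the assistant-message variant, so both fixtures now spell out role: assistant instead of relying on the old loader constructing OpenAIAssistantMessageParam explicitly. A hypothetical sketch of the failure mode the added lines avoid:

# Sketch: if role has no default (or discriminates a message union), a raw
# fixture dict without it no longer validates. Message is a stand-in model.
from pydantic import BaseModel, ValidationError


class Message(BaseModel):
    role: str  # the raw fixture must now carry this itself
    content: str | None = None


try:
    Message(**{"content": "Dublin"})  # pre-commit fixture shape: no role
except ValidationError as exc:
    print(exc)  # reports that the role field is required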