From 9166baa716002422aee906534366416ce519fe35 Mon Sep 17 00:00:00 2001
From: Derek Higgins
Date: Tue, 6 May 2025 11:24:38 +0100
Subject: [PATCH] Load OpenAIChatCompletion directly from YAML

No need to iterate through the yaml when it can just be loaded.

Signed-off-by: Derek Higgins
---
 .../meta_reference/fixtures/__init__.py       | 45 +------------------
 .../fixtures/simple_chat_completion.yaml      |  1 +
 .../fixtures/tool_call_completion.yaml        |  1 +
 3 files changed, 3 insertions(+), 44 deletions(-)

diff --git a/tests/unit/providers/agents/meta_reference/fixtures/__init__.py b/tests/unit/providers/agents/meta_reference/fixtures/__init__.py
index 47c5d01ee..06da285eb 100644
--- a/tests/unit/providers/agents/meta_reference/fixtures/__init__.py
+++ b/tests/unit/providers/agents/meta_reference/fixtures/__init__.py
@@ -9,11 +9,7 @@ import os
 import yaml
 
 from llama_stack.apis.inference.inference import (
-    OpenAIAssistantMessageParam,
     OpenAIChatCompletion,
-    OpenAIChatCompletionToolCall,
-    OpenAIChatCompletionToolCallFunction,
-    OpenAIChoice,
 )
 
 
@@ -32,43 +28,4 @@ def load_chat_completion_fixture(filename: str) -> OpenAIChatCompletion:
 
     with open(fixture_path) as f:
         data = yaml.safe_load(f)
-
-        choices = []
-        for choice_data in data.get("choices", []):
-            message_data = choice_data.get("message", {})
-
-            # Handle tool calls if present
-            tool_calls = None
-            if "tool_calls" in message_data:
-                tool_calls = []
-                for tool_call_data in message_data.get("tool_calls", []):
-                    function_data = tool_call_data.get("function", {})
-                    function = OpenAIChatCompletionToolCallFunction(
-                        name=function_data.get("name"),
-                        arguments=function_data.get("arguments"),
-                    )
-                    tool_call = OpenAIChatCompletionToolCall(
-                        id=tool_call_data.get("id"),
-                        type=tool_call_data.get("type"),
-                        function=function,
-                    )
-                    tool_calls.append(tool_call)
-
-            message = OpenAIAssistantMessageParam(
-                content=message_data.get("content"),
-                tool_calls=tool_calls,
-            )
-
-            choice = OpenAIChoice(
-                message=message,
-                finish_reason=choice_data.get("finish_reason"),
-                index=choice_data.get("index", 0),
-            )
-            choices.append(choice)
-
-        return OpenAIChatCompletion(
-            id=data.get("id"),
-            choices=choices,
-            created=data.get("created"),
-            model=data.get("model"),
-        )
+        return OpenAIChatCompletion(**data)
diff --git a/tests/unit/providers/agents/meta_reference/fixtures/simple_chat_completion.yaml b/tests/unit/providers/agents/meta_reference/fixtures/simple_chat_completion.yaml
index 796eea9ca..4959349a0 100644
--- a/tests/unit/providers/agents/meta_reference/fixtures/simple_chat_completion.yaml
+++ b/tests/unit/providers/agents/meta_reference/fixtures/simple_chat_completion.yaml
@@ -2,6 +2,7 @@ id: chat-completion-123
 choices:
   - message:
       content: "Dublin"
+      role: assistant
     finish_reason: stop
     index: 0
 created: 1234567890
diff --git a/tests/unit/providers/agents/meta_reference/fixtures/tool_call_completion.yaml b/tests/unit/providers/agents/meta_reference/fixtures/tool_call_completion.yaml
index df0da553c..f6532e3a9 100644
--- a/tests/unit/providers/agents/meta_reference/fixtures/tool_call_completion.yaml
+++ b/tests/unit/providers/agents/meta_reference/fixtures/tool_call_completion.yaml
@@ -7,6 +7,7 @@ choices:
           function:
             name: web_search
             arguments: '{"query":"What is the capital of Ireland?"}'
+      role: assistant
     finish_reason: stop
     index: 0
 created: 1234567890
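
Note, not part of the patch: below is a minimal sketch of why the one-line replacement is enough, assuming OpenAIChatCompletion is a pydantic model that validates and constructs its nested choices, messages, and tool calls from plain dicts. The inline YAML mirrors simple_chat_completion.yaml from the diff above; the model value is a placeholder, since the real fixture's model field is not shown here.

# Sketch only: keyword-unpacking the dict returned by yaml.safe_load() lets
# the (assumed) pydantic model build its nested objects itself, which is what
# makes OpenAIChatCompletion(**data) sufficient in the fixture loader.
import yaml

from llama_stack.apis.inference.inference import OpenAIChatCompletion

FIXTURE_YAML = """
id: chat-completion-123
choices:
  - message:
      content: "Dublin"
      role: assistant   # required by the assistant message model, hence the fixture change
    finish_reason: stop
    index: 0
created: 1234567890
model: example-model    # placeholder, not the real fixture value
"""

data = yaml.safe_load(FIXTURE_YAML)
completion = OpenAIChatCompletion(**data)
assert completion.choices[0].message.content == "Dublin"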