diff --git a/llama_stack/providers/inline/agents/meta_reference/responses/openai_responses.py b/llama_stack/providers/inline/agents/meta_reference/responses/openai_responses.py
index 2526dc1c3..aa04f1afa 100644
--- a/llama_stack/providers/inline/agents/meta_reference/responses/openai_responses.py
+++ b/llama_stack/providers/inline/agents/meta_reference/responses/openai_responses.py
@@ -127,9 +127,11 @@ class OpenAIResponsesImpl:
             # the instructions used on previous turns will not be carried over in the context
             previous_instructions = previous_response.instructions
             if previous_instructions:
-                if (isinstance(previous_instructions, str) and
-                    previous_instructions == messages[0].content and
-                    messages[0].role == "system"):
+                if (
+                    isinstance(previous_instructions, str)
+                    and previous_instructions == messages[0].content
+                    and messages[0].role == "system"
+                ):
                     # Omit instructions from previous response
                     del messages[0]
                 else:
diff --git a/tests/unit/providers/agents/meta_reference/test_openai_responses.py b/tests/unit/providers/agents/meta_reference/test_openai_responses.py
index f625079e0..5e95439f5 100644
--- a/tests/unit/providers/agents/meta_reference/test_openai_responses.py
+++ b/tests/unit/providers/agents/meta_reference/test_openai_responses.py
@@ -844,7 +844,7 @@ async def test_create_openai_response_with_previous_response_instructions(
             OpenAIUserMessageParam(content="Name some towns in Ireland"),
             OpenAIAssistantMessageParam(content="Galway, Longford, Sligo"),
         ],
-        instructions="You are a helpful assistant."
+        instructions="You are a helpful assistant.",
     )
     mock_responses_store.get_response_object.return_value = response
 
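For context, a minimal standalone sketch of the deduplication behavior that the reformatted condition implements: when the previous response's stored instructions are a string and exactly match the leading system message, that system message is dropped so instructions from earlier turns are not carried into the new context. The `Message` dataclass and `drop_carried_over_instructions` function below are illustrative stand-ins, not part of the patch or the llama_stack API; only the condition's logic is taken from the diff.

```python
from dataclasses import dataclass


@dataclass
class Message:
    # Simplified stand-in for the OpenAI*MessageParam chat message types.
    role: str
    content: str


def drop_carried_over_instructions(
    messages: list[Message], previous_instructions: str | None
) -> list[Message]:
    # Mirrors the reformatted condition: drop the leading system message only
    # when it exactly matches the previous response's string instructions, so
    # instructions from earlier turns are not carried into the new context.
    if (
        previous_instructions
        and isinstance(previous_instructions, str)
        and messages
        and messages[0].role == "system"
        and messages[0].content == previous_instructions
    ):
        return messages[1:]
    return messages


if __name__ == "__main__":
    msgs = [
        Message("system", "You are a helpful assistant."),
        Message("user", "Name some towns in Ireland"),
    ]
    # The carried-over system prompt is removed; the user turn is kept.
    assert drop_carried_over_instructions(msgs, "You are a helpful assistant.")[0].role == "user"
```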