From 9d039e884d34e75ebb9c86472b3eae504e8afd2d Mon Sep 17 00:00:00 2001
From: Francisco Javier Arceo
Date: Thu, 9 Oct 2025 16:00:59 -0400
Subject: [PATCH] updated tests to verify usage of previous_response_id and
 conversation

Signed-off-by: Francisco Javier Arceo
---
 llama_stack/log.py                                   |  1 +
 .../meta_reference/responses/openai_responses.py     |  5 +++++
 .../responses/test_conversation_responses.py         | 12 ++++++++++++
 .../meta_reference/test_conversation_integration.py  | 12 ++++++++++++
 4 files changed, 30 insertions(+)

diff --git a/llama_stack/log.py b/llama_stack/log.py
index ce92219f4..ff54b2f7c 100644
--- a/llama_stack/log.py
+++ b/llama_stack/log.py
@@ -30,6 +30,7 @@ CATEGORIES = [
     "tools",
     "client",
     "telemetry",
+    "openai",
     "openai_responses",
     "openai_conversations",
     "testing",
diff --git a/llama_stack/providers/inline/agents/meta_reference/responses/openai_responses.py b/llama_stack/providers/inline/agents/meta_reference/responses/openai_responses.py
index b317d6672..7e93db865 100644
--- a/llama_stack/providers/inline/agents/meta_reference/responses/openai_responses.py
+++ b/llama_stack/providers/inline/agents/meta_reference/responses/openai_responses.py
@@ -230,6 +230,11 @@ class OpenAIResponsesImpl:
         if shields is not None:
             raise NotImplementedError("Shields parameter is not yet implemented in the meta-reference provider")
 
+        if conversation is not None and previous_response_id is not None:
+            raise ValueError(
+                "Mutually exclusive parameters: 'previous_response_id' and 'conversation'. Ensure you are only providing one of these parameters."
+            )
+
         if conversation is not None:
             if not conversation.startswith("conv_"):
                 raise InvalidConversationIdError(conversation)
diff --git a/tests/integration/responses/test_conversation_responses.py b/tests/integration/responses/test_conversation_responses.py
index ed9753884..ae798b081 100644
--- a/tests/integration/responses/test_conversation_responses.py
+++ b/tests/integration/responses/test_conversation_responses.py
@@ -102,6 +102,18 @@ class TestConversationResponses:
         )
         assert any(word in str(exc_info.value).lower() for word in ["not found", "404"])
 
+        response = openai_client.responses.create(
+            model=text_model_id, input=[{"role": "user", "content": "First response"}]
+        )
+        with pytest.raises(Exception) as exc_info:
+            openai_client.responses.create(
+                model=text_model_id,
+                input=[{"role": "user", "content": "Hello"}],
+                conversation="conv_test123",
+                previous_response_id=response.id,
+            )
+        assert "mutually exclusive" in str(exc_info.value).lower()
+
     def test_conversation_backward_compatibility(self, openai_client, text_model_id):
         """Test that responses work without conversation parameter (backward compatibility)."""
         response = openai_client.responses.create(
diff --git a/tests/unit/providers/agents/meta_reference/test_conversation_integration.py b/tests/unit/providers/agents/meta_reference/test_conversation_integration.py
index fd99c7514..f0f4ac90e 100644
--- a/tests/unit/providers/agents/meta_reference/test_conversation_integration.py
+++ b/tests/unit/providers/agents/meta_reference/test_conversation_integration.py
@@ -330,3 +330,15 @@ class TestIntegrationWorkflow:
         )
 
         assert "not found" in str(exc_info.value)
+
+    async def test_conversation_and_previous_response_id(
+        self, responses_impl_with_conversations, mock_conversations_api, mock_responses_store
+    ):
+        with pytest.raises(ValueError) as exc_info:
+            await responses_impl_with_conversations.create_openai_response(
+                input="test", model="test", conversation="conv_123", previous_response_id="resp_123"
+            )
+
+        assert "Mutually exclusive parameters" in str(exc_info.value)
+        assert "previous_response_id" in str(exc_info.value)
+        assert "conversation" in str(exc_info.value)