Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-12-14 16:22:38 +00:00)
remove large try/except blocks and the unnecessary _check_conversation_exists method
Signed-off-by: Francisco Javier Arceo <farceo@redhat.com>
Parent: 9d039e884d
Commit: b2a883842b
2 changed files with 90 additions and 108 deletions
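The refactor replaces a look-before-you-leap existence probe with Python's EAFP idiom: call the API and let the domain error propagate, rather than converting it to a boolean only to raise an equivalent error one line later. A minimal, self-contained sketch of the two styles (the stub API and error class below are illustrative stand-ins, not the real llama-stack types):

```python
import asyncio


class ConversationNotFoundError(ValueError):
    """Illustrative stand-in for llama_stack.apis.common.errors.ConversationNotFoundError."""

    def __init__(self, conversation_id: str) -> None:
        super().__init__(f"Conversation {conversation_id!r} not found")


class StubConversationsAPI:
    """Hypothetical in-memory stand-in for the Conversations API."""

    def __init__(self, known_ids: set[str]) -> None:
        self._known_ids = known_ids

    async def get_conversation(self, conversation_id: str) -> dict:
        if conversation_id not in self._known_ids:
            raise ConversationNotFoundError(conversation_id)
        return {"id": conversation_id}


async def main() -> None:
    api = StubConversationsAPI({"conv_123"})

    # Old style (removed by this commit): probe, swallow the error,
    # then re-raise an equivalent one from the caller.
    async def exists(conversation_id: str) -> bool:
        try:
            await api.get_conversation(conversation_id)
            return True
        except ConversationNotFoundError:
            return False

    if not await exists("conv_missing"):
        print("old style: caller must raise ConversationNotFoundError itself")

    # New style: call directly; the domain error propagates unchanged.
    try:
        _ = await api.get_conversation("conv_missing")
    except ConversationNotFoundError as e:
        print(f"new style: {e}")


asyncio.run(main())
```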
@@ -25,7 +25,6 @@ from llama_stack.apis.agents.openai_responses import (
     OpenAIResponseTextFormat,
 )
 from llama_stack.apis.common.errors import (
-    ConversationNotFoundError,
     InvalidConversationIdError,
 )
 from llama_stack.apis.conversations import Conversations
@@ -239,10 +238,8 @@ class OpenAIResponsesImpl:
             if not conversation.startswith("conv_"):
                 raise InvalidConversationIdError(conversation)

-            conversation_exists = await self._check_conversation_exists(conversation)
-            if not conversation_exists:
-                raise ConversationNotFoundError(conversation)
-
+            # Check conversation exists (raises ConversationNotFoundError if not)
+            _ = await self.conversations_api.get_conversation(conversation)

             input = await self._load_conversation_context(conversation, input)

         stream_gen = self._create_streaming_response(
@@ -359,19 +356,10 @@ class OpenAIResponsesImpl:
     async def delete_openai_response(self, response_id: str) -> OpenAIDeleteResponseObject:
         return await self.responses_store.delete_response_object(response_id)

-    async def _check_conversation_exists(self, conversation_id: str) -> bool:
-        """Check if a conversation exists."""
-        try:
-            await self.conversations_api.get_conversation(conversation_id)
-            return True
-        except ConversationNotFoundError:
-            return False
-
     async def _load_conversation_context(
         self, conversation_id: str, content: str | list[OpenAIResponseInput]
     ) -> list[OpenAIResponseInput]:
         """Load conversation history and merge with provided content."""
-        try:
-            conversation_items = await self.conversations_api.list(conversation_id, order="asc")
+        conversation_items = await self.conversations_api.list(conversation_id, order="asc")

-            context_messages = []
+        context_messages = []
@@ -398,17 +386,10 @@ class OpenAIResponsesImpl:
-            return context_messages
+        return context_messages

-        except Exception as e:
-            logger.error(f"Failed to load conversation context for {conversation_id}: {e}")
-            if isinstance(content, str):
-                return [OpenAIResponseMessage(role="user", content=content)]
-            return content
-
     async def _sync_response_to_conversation(
         self, conversation_id: str, content: str | list[OpenAIResponseInput], response: OpenAIResponseObject
     ) -> None:
         """Sync content and response messages to the conversation."""
-        try:
-            conversation_items = []
+        conversation_items = []

-            # add user content message(s)
+        # add user content message(s)
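With the broad try/except stripped from _load_conversation_context, a listing failure now surfaces to the caller instead of being logged and silently degraded to the raw user input. A small sketch of the new propagation behavior (the stub API and load_context helper are hypothetical stand-ins, not the real method):

```python
import asyncio


class StubConversationsAPI:
    """Hypothetical stand-in whose list() call always fails."""

    async def list(self, conversation_id: str, order: str = "asc") -> list:
        raise RuntimeError("API Error")


async def load_context(api: StubConversationsAPI, conversation_id: str, content: str) -> list:
    # Mirrors the post-refactor shape: no try/except, so failures propagate.
    items = await api.list(conversation_id, order="asc")
    return [*items, {"role": "user", "content": content}]


async def main() -> None:
    try:
        await load_context(StubConversationsAPI(), "conv_123", "Hello")
    except RuntimeError as e:
        # Pre-refactor, this was logged and swallowed, falling back to just
        # the user message; now the caller decides how to handle it.
        print(f"propagated to caller: {e}")


asyncio.run(main())
```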
@@ -418,7 +399,10 @@ class OpenAIResponsesImpl:
                 )
             elif isinstance(content, list):
                 for item in content:
-                    if isinstance(item, OpenAIResponseMessage) and item.role == "user":
+                    if not isinstance(item, OpenAIResponseMessage):
+                        raise NotImplementedError(f"Unsupported input item type: {type(item)}")
+
+                    if item.role == "user":
                         if isinstance(item.content, str):
                             conversation_items.append(
                                 {
@@ -429,22 +413,27 @@ class OpenAIResponsesImpl:
                             )
                         elif isinstance(item.content, list):
                             conversation_items.append({"type": "message", "role": "user", "content": item.content})
+                        else:
+                            raise NotImplementedError(f"Unsupported user message content type: {type(item.content)}")
+                    elif item.role == "assistant":
+                        if isinstance(item.content, list):
+                            conversation_items.append({"type": "message", "role": "assistant", "content": item.content})
+                        else:
+                            raise NotImplementedError(f"Unsupported assistant message content type: {type(item.content)}")
+                    else:
+                        raise NotImplementedError(f"Unsupported message role: {item.role}")

         # add assistant response message
         for output_item in response.output:
             if isinstance(output_item, OpenAIResponseMessage) and output_item.role == "assistant":
                 if hasattr(output_item, "content") and isinstance(output_item.content, list):
-                    conversation_items.append(
-                        {"type": "message", "role": "assistant", "content": output_item.content}
-                    )
+                    conversation_items.append({"type": "message", "role": "assistant", "content": output_item.content})

         if conversation_items:
             adapter = TypeAdapter(list[ConversationItem])
             validated_items = adapter.validate_python(conversation_items)
-            await self.conversations_api.add_items(conversation_id, validated_items)
-
-        except Exception as e:
-            logger.error(f"Failed to sync response {response.id} to conversation {conversation_id}: {e}")
-            # don't fail response creation if conversation sync fails
-
-        return None
+            try:
+                await self.conversations_api.add_items(conversation_id, validated_items)
+            except Exception as e:
+                logger.error(f"Failed to sync response {response.id} to conversation {conversation_id}: {e}")
+                # don't fail response creation if conversation sync fails
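The rewritten sync method narrows the try/except to the single I/O call: unsupported item shapes now fail fast with NotImplementedError before any network work, and only add_items is shielded so a sync failure cannot break response creation. A compact sketch of that narrow-try shape (the stub API is an assumption, and ConversationItem is simplified to a dict for the TypeAdapter):

```python
import asyncio
import logging

from pydantic import TypeAdapter

logger = logging.getLogger(__name__)

# Simplified stand-in for the ConversationItem union in llama-stack.
ConversationItem = dict


class StubConversationsAPI:
    """Hypothetical stand-in that fails, to show the graceful path."""

    async def add_items(self, conversation_id: str, items: list) -> None:
        raise RuntimeError("API Error")


async def sync_items(api: StubConversationsAPI, conversation_id: str, raw_items: list) -> None:
    # Fail fast on unsupported shapes *before* any I/O.
    for item in raw_items:
        if not isinstance(item, dict):
            raise NotImplementedError(f"Unsupported input item type: {type(item)}")

    adapter = TypeAdapter(list[ConversationItem])
    validated = adapter.validate_python(raw_items)

    # Only the network call sits inside the try block.
    try:
        await api.add_items(conversation_id, validated)
    except Exception as e:
        logger.error(f"Failed to sync to conversation {conversation_id}: {e}")
        # don't fail response creation if conversation sync fails


asyncio.run(sync_items(StubConversationsAPI(), "conv_123", [{"type": "message"}]))
```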
@@ -53,33 +53,19 @@ def responses_impl_with_conversations(
 class TestConversationValidation:
     """Test conversation ID validation logic."""

-    async def test_conversation_existence_check_valid(self, responses_impl_with_conversations, mock_conversations_api):
-        """Test conversation existence check for valid conversation."""
-        conv_id = "conv_valid123"
-
-        # Mock successful conversation retrieval
-        mock_conversations_api.get_conversation.return_value = Conversation(
-            id=conv_id, created_at=1234567890, metadata={}, object="conversation"
-        )
-
-        result = await responses_impl_with_conversations._check_conversation_exists(conv_id)
-
-        assert result is True
-        mock_conversations_api.get_conversation.assert_called_once_with(conv_id)
-
-    async def test_conversation_existence_check_invalid(
+    async def test_nonexistent_conversation_raises_error(
         self, responses_impl_with_conversations, mock_conversations_api
     ):
-        """Test conversation existence check for non-existent conversation."""
+        """Test that ConversationNotFoundError is raised for non-existent conversation."""
         conv_id = "conv_nonexistent"

         # Mock conversation not found
         mock_conversations_api.get_conversation.side_effect = ConversationNotFoundError("conv_nonexistent")

-        result = await responses_impl_with_conversations._check_conversation_exists(conv_id)
-
-        assert result is False
-        mock_conversations_api.get_conversation.assert_called_once_with(conv_id)
+        with pytest.raises(ConversationNotFoundError):
+            await responses_impl_with_conversations.create_openai_response(
+                input="Hello", model="test-model", conversation=conv_id, stream=False
+            )


 class TestConversationContextLoading:
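The rewritten test asserts propagation with pytest.raises instead of checking a boolean return. A self-contained sketch of that pattern (requires pytest-asyncio for the async test; the fixture-free setup and the validate_conversation helper are illustrative, not the suite's real code):

```python
from unittest.mock import AsyncMock

import pytest


class ConversationNotFoundError(ValueError):
    """Illustrative stand-in for the real llama-stack error class."""


async def validate_conversation(api, conversation_id: str) -> None:
    # Mirrors the EAFP call in create_openai_response: no boolean probe.
    _ = await api.get_conversation(conversation_id)


@pytest.mark.asyncio
async def test_nonexistent_conversation_raises_error():
    api = AsyncMock()
    api.get_conversation.side_effect = ConversationNotFoundError("conv_nonexistent")

    # The domain error should reach the caller unchanged.
    with pytest.raises(ConversationNotFoundError):
        await validate_conversation(api, "conv_nonexistent")

    api.get_conversation.assert_called_once_with("conv_nonexistent")
```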
@@ -137,12 +123,8 @@ class TestConversationContextLoading:
         mock_conversations_api.list.side_effect = Exception("API Error")

-        result = await responses_impl_with_conversations._load_conversation_context(conv_id, input_text)
-
-        assert len(result) == 1
-        assert isinstance(result[0], OpenAIResponseMessage)
-        assert result[0].role == "user"
-        assert result[0].content == input_text
+        with pytest.raises(Exception, match="API Error"):
+            await responses_impl_with_conversations._load_conversation_context(conv_id, input_text)

     async def test_load_conversation_context_with_list_input(
         self, responses_impl_with_conversations, mock_conversations_api
@@ -236,20 +218,31 @@ class TestMessageSyncing:
     async def test_sync_response_to_conversation_api_error(
         self, responses_impl_with_conversations, mock_conversations_api
     ):
-        """Test syncing when conversations API call fails."""
-        conv_id = "conv_test123"
+        mock_conversations_api.add_items.side_effect = Exception("API Error")

         mock_response = OpenAIResponseObject(
             id="resp_123", created_at=1234567890, model="test-model", object="response", output=[], status="completed"
         )

-        # Mock API error
-        mock_conversations_api.add_items.side_effect = Exception("API Error")
-
-        # Should not raise exception (graceful failure)
-        result = await responses_impl_with_conversations._sync_response_to_conversation(conv_id, "Hello", mock_response)
+        result = await responses_impl_with_conversations._sync_response_to_conversation(
+            "conv_test123", "Hello", mock_response
+        )
         assert result is None

+    async def test_sync_unsupported_types(self, responses_impl_with_conversations):
+        mock_response = OpenAIResponseObject(
+            id="resp_123", created_at=1234567890, model="test-model", object="response", output=[], status="completed"
+        )
+
+        with pytest.raises(NotImplementedError, match="Unsupported input item type"):
+            await responses_impl_with_conversations._sync_response_to_conversation(
+                "conv_123", [{"not": "message"}], mock_response
+            )
+
+        with pytest.raises(NotImplementedError, match="Unsupported message role: system"):
+            await responses_impl_with_conversations._sync_response_to_conversation(
+                "conv_123", [OpenAIResponseMessage(role="system", content="test")], mock_response
+            )


 class TestIntegrationWorkflow:
     """Integration tests for the full conversation workflow."""