Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-10-04 04:04:14 +00:00).
feat: Add OpenAI Conversations API
Signed-off-by: Francisco Javier Arceo <farceo@redhat.com>
This commit is contained in:
parent
0e13512dd7
commit
a74a7cc873
18 changed files with 3280 additions and 1088 deletions
134
tests/integration/conversations/test_openai_conversations.py
Normal file
134
tests/integration/conversations/test_openai_conversations.py
Normal file
|
@ -0,0 +1,134 @@
|
|||
# Copyright (c) Meta Platforms, Inc. and affiliates.
|
||||
# All rights reserved.
|
||||
#
|
||||
# This source code is licensed under the terms described in the LICENSE file in
|
||||
# the root directory of this source tree.
|
||||
|
||||
import pytest
|
||||
|
||||
|
||||
@pytest.mark.integration
class TestOpenAIConversations:
    """End-to-end checks of the OpenAI-compatible Conversations endpoints."""

    def test_conversation_create(self, openai_client):
        """Creating a conversation returns a well-formed conversation object."""
        conv = openai_client.conversations.create(
            metadata={"topic": "demo"}, items=[{"type": "message", "role": "user", "content": "Hello!"}]
        )

        assert conv.id.startswith("conv_")
        assert conv.object == "conversation"
        assert conv.metadata["topic"] == "demo"
        assert isinstance(conv.created_at, int)

    def test_conversation_retrieve(self, openai_client):
        """A created conversation can be fetched back unchanged."""
        conv = openai_client.conversations.create(metadata={"topic": "demo"})

        fetched = openai_client.conversations.retrieve(conv.id)

        assert fetched.id == conv.id
        assert fetched.object == "conversation"
        assert fetched.metadata["topic"] == "demo"
        assert fetched.created_at == conv.created_at

    def test_conversation_update(self, openai_client):
        """Updating metadata preserves the id and creation timestamp."""
        conv = openai_client.conversations.create(metadata={"topic": "demo"})

        patched = openai_client.conversations.update(conv.id, metadata={"topic": "project-x"})

        assert patched.id == conv.id
        assert patched.metadata["topic"] == "project-x"
        assert patched.created_at == conv.created_at

    def test_conversation_delete(self, openai_client):
        """Deletion returns a tombstone acknowledging the removed conversation."""
        conv = openai_client.conversations.create(metadata={"topic": "demo"})

        tombstone = openai_client.conversations.delete(conv.id)

        assert tombstone.id == conv.id
        assert tombstone.object == "conversation.deleted"
        assert tombstone.deleted is True

    def test_conversation_items_create(self, openai_client):
        """Batch item creation returns the new items in insertion order."""
        conv = openai_client.conversations.create()

        created = openai_client.conversations.items.create(
            conv.id,
            items=[
                {"type": "message", "role": "user", "content": [{"type": "input_text", "text": "Hello!"}]},
                {"type": "message", "role": "user", "content": [{"type": "input_text", "text": "How are you?"}]},
            ],
        )

        assert created.object == "list"
        assert len(created.data) == 2
        assert created.data[0].content[0].text == "Hello!"
        assert created.data[1].content[0].text == "How are you?"
        assert created.first_id == created.data[0].id
        assert created.last_id == created.data[1].id
        assert created.has_more is False

    def test_conversation_items_list(self, openai_client):
        """Listing items returns a paginated list with the expected envelope."""
        conv = openai_client.conversations.create()

        openai_client.conversations.items.create(
            conv.id,
            items=[{"type": "message", "role": "user", "content": [{"type": "input_text", "text": "Hello!"}]}],
        )

        listing = openai_client.conversations.items.list(conv.id, limit=10)

        assert listing.object == "list"
        assert len(listing.data) >= 1
        assert listing.data[0].type == "message"
        assert listing.data[0].role == "user"
        # The pagination envelope fields must at least be present.
        assert hasattr(listing, "first_id")
        assert hasattr(listing, "last_id")
        assert hasattr(listing, "has_more")

    def test_conversation_item_retrieve(self, openai_client):
        """A single item can be fetched by id within its conversation."""
        conv = openai_client.conversations.create()

        created = openai_client.conversations.items.create(
            conv.id,
            items=[{"type": "message", "role": "user", "content": [{"type": "input_text", "text": "Hello!"}]}],
        )

        msg_id = created.data[0].id
        item = openai_client.conversations.items.retrieve(msg_id, conversation_id=conv.id)

        assert item.id == msg_id
        assert item.type == "message"
        assert item.role == "user"
        assert item.content[0].text == "Hello!"

    def test_conversation_item_delete(self, openai_client):
        """Deleting an item returns a tombstone for that item."""
        conv = openai_client.conversations.create()

        created = openai_client.conversations.items.create(
            conv.id,
            items=[{"type": "message", "role": "user", "content": [{"type": "input_text", "text": "Hello!"}]}],
        )

        msg_id = created.data[0].id
        tombstone = openai_client.conversations.items.delete(msg_id, conversation_id=conv.id)

        assert tombstone.id == msg_id
        assert tombstone.object == "conversation.item.deleted"
        assert tombstone.deleted is True

    def test_full_workflow(self, openai_client):
        """Exercise create -> add items -> list -> update -> delete in sequence."""
        conv = openai_client.conversations.create(
            metadata={"topic": "workflow-test"}, items=[{"type": "message", "role": "user", "content": "Hello!"}]
        )

        openai_client.conversations.items.create(
            conv.id,
            items=[{"type": "message", "role": "user", "content": [{"type": "input_text", "text": "Follow up"}]}],
        )

        every_item = openai_client.conversations.items.list(conv.id)
        assert len(every_item.data) >= 2

        patched = openai_client.conversations.update(conv.id, metadata={"topic": "workflow-complete"})
        assert patched.metadata["topic"] == "workflow-complete"

        openai_client.conversations.delete(conv.id)
|
60
tests/unit/conversations/test_api_models.py
Normal file
60
tests/unit/conversations/test_api_models.py
Normal file
|
@ -0,0 +1,60 @@
|
|||
# Copyright (c) Meta Platforms, Inc. and affiliates.
|
||||
# All rights reserved.
|
||||
#
|
||||
# This source code is licensed under the terms described in the LICENSE file in
|
||||
# the root directory of this source tree.
|
||||
|
||||
|
||||
from llama_stack.apis.conversations.conversations import (
|
||||
Conversation,
|
||||
ConversationCreateRequest,
|
||||
ConversationItem,
|
||||
ConversationItemList,
|
||||
)
|
||||
|
||||
|
||||
def test_conversation_create_request_defaults():
    """An empty create request defaults to no items and empty metadata."""
    req = ConversationCreateRequest()

    assert req.items == []
    assert req.metadata == {}
|
||||
|
||||
|
||||
def test_conversation_model_defaults():
    """Conversation accepts explicit None metadata and keeps its field values."""
    conv = Conversation(
        id="conv_123456789",
        created_at=1234567890,
        metadata=None,
        object="conversation",
    )

    assert conv.id == "conv_123456789"
    assert conv.object == "conversation"
    assert conv.metadata is None
|
||||
|
||||
|
||||
def test_openai_client_compatibility():
    """A message built with the official OpenAI type validates as a ConversationItem."""
    from openai.types.conversations.message import Message
    from pydantic import TypeAdapter

    source = Message(
        id="msg_123",
        content=[{"type": "input_text", "text": "Hello"}],
        role="user",
        status="in_progress",
        type="message",
        object="message",
    )

    # Round-trip the OpenAI object through our discriminated-union adapter.
    converted = TypeAdapter(ConversationItem).validate_python(source.model_dump())

    assert converted.id == "msg_123"
    assert converted.type == "message"
|
||||
|
||||
|
||||
def test_conversation_item_list():
    """An empty item list has list semantics and no pagination cursors."""
    empty = ConversationItemList(data=[])

    assert empty.object == "list"
    assert empty.data == []
    assert empty.first_id is None
    assert empty.last_id is None
    assert empty.has_more is False
|
117
tests/unit/conversations/test_conversations.py
Normal file
117
tests/unit/conversations/test_conversations.py
Normal file
|
@ -0,0 +1,117 @@
|
|||
# Copyright (c) Meta Platforms, Inc. and affiliates.
|
||||
# All rights reserved.
|
||||
#
|
||||
# This source code is licensed under the terms described in the LICENSE file in
|
||||
# the root directory of this source tree.
|
||||
|
||||
import tempfile
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
from openai.types.conversations.conversation import Conversation as OpenAIConversation
|
||||
from openai.types.conversations.conversation_item import ConversationItem as OpenAIConversationItem
|
||||
from pydantic import TypeAdapter
|
||||
|
||||
from llama_stack.apis.agents.openai_responses import (
|
||||
OpenAIResponseInputMessageContentText,
|
||||
OpenAIResponseMessage,
|
||||
)
|
||||
from llama_stack.core.conversations.conversations import (
|
||||
ConversationServiceConfig,
|
||||
ConversationServiceImpl,
|
||||
)
|
||||
from llama_stack.core.datatypes import StackRunConfig
|
||||
from llama_stack.providers.utils.sqlstore.sqlstore import SqliteSqlStoreConfig
|
||||
|
||||
|
||||
@pytest.fixture
async def service():
    """Yield a ConversationServiceImpl backed by a throwaway SQLite database."""
    with tempfile.TemporaryDirectory() as tmp:
        sqlite_path = Path(tmp) / "test_conversations.db"

        cfg = ConversationServiceConfig(
            run_config=StackRunConfig(
                image_name="test",
                providers={},
                conversations_store=SqliteSqlStoreConfig(db_path=str(sqlite_path)),
            )
        )
        svc = ConversationServiceImpl(cfg, {})
        await svc.initialize()
        # Yield inside the context manager so the temp dir outlives the test.
        yield svc
|
||||
|
||||
|
||||
async def test_conversation_lifecycle(service):
    """Create, fetch, and delete a conversation through the service layer."""
    conv = await service.create_conversation(metadata={"test": "data"})

    assert conv.id.startswith("conv_")
    assert conv.metadata == {"test": "data"}

    fetched = await service.get_conversation(conv.id)
    assert fetched.id == conv.id

    tombstone = await service.openai_delete_conversation(conv.id)
    assert tombstone.id == conv.id
|
||||
|
||||
|
||||
async def test_conversation_items(service):
    """Items added to a conversation are persisted and listable."""
    conv = await service.create_conversation()

    payload = [
        OpenAIResponseMessage(
            type="message",
            role="user",
            content=[OpenAIResponseInputMessageContentText(type="input_text", text="Hello")],
            id="msg_test123",
            status="completed",
        )
    ]
    created = await service.create(conv.id, payload)

    assert len(created.data) == 1
    assert created.data[0].id.startswith("msg_")

    listed = await service.list(conv.id)
    assert len(listed.data) == 1
|
||||
|
||||
|
||||
async def test_invalid_conversation_id(service):
    """IDs without the 'conv_' prefix are rejected."""
    with pytest.raises(ValueError, match="Expected an ID that begins with 'conv_'"):
        await service._get_validated_conversation("invalid_id")
|
||||
|
||||
|
||||
async def test_empty_parameter_validation(service):
    """An empty conversation id argument is rejected before any lookup."""
    with pytest.raises(ValueError, match="Expected a non-empty value"):
        await service.retrieve("", "item_123")
|
||||
|
||||
|
||||
async def test_openai_type_compatibility(service):
    """Service models round-trip cleanly through the official OpenAI client types."""
    conv = await service.create_conversation(metadata={"test": "value"})

    as_openai = OpenAIConversation.model_validate(conv.model_dump())
    for field in ("id", "object", "created_at", "metadata"):
        assert getattr(as_openai, field) == getattr(conv, field)

    payload = [
        OpenAIResponseMessage(
            type="message",
            role="user",
            content=[OpenAIResponseInputMessageContentText(type="input_text", text="Hello")],
            id="msg_test456",
            status="completed",
        )
    ]
    created = await service.create(conv.id, payload)

    for field in ("object", "data", "first_id", "last_id", "has_more"):
        assert hasattr(created, field)
    assert created.object == "list"

    listed = await service.list(conv.id)
    fetched = await service.retrieve(conv.id, listed.data[0].id)

    # Validation against the OpenAI item union raises on any schema drift.
    TypeAdapter(OpenAIConversationItem).validate_python(fetched.model_dump())
|
Loading…
Add table
Add a link
Reference in a new issue