Mirror of https://github.com/meta-llama/llama-stack.git, synced 2025-12-03 18:00:36 +00:00
# What does this PR do? Extract API definitions and provider specifications into a standalone llama-stack-api package that can be published to PyPI independently of the main llama-stack server. see: https://github.com/llamastack/llama-stack/pull/2978 and https://github.com/llamastack/llama-stack/pull/2978#issuecomment-3145115942 Motivation External providers currently import from llama-stack, which overrides the installed version and causes dependency conflicts. This separation allows external providers to: - Install only the type definitions they need without server dependencies - Avoid version conflicts with the installed llama-stack package - Be versioned and released independently This enables us to re-enable external provider module tests that were previously blocked by these import conflicts. Changes - Created llama-stack-api package with minimal dependencies (pydantic, jsonschema) - Moved APIs, providers datatypes, strong_typing, and schema_utils - Updated all imports from llama_stack.* to llama_stack_api.* - Configured local editable install for development workflow - Updated linting and type-checking configuration for both packages Next Steps - Publish llama-stack-api to PyPI - Update external provider dependencies - Re-enable external provider module tests Pre-cursor PRs to this one: - #4093 - #3954 - #4064 These PRs moved key pieces _out_ of the Api pkg, limiting the scope of change here. relates to #3237 ## Test Plan Package builds successfully and can be imported independently. All pre-commit hooks pass with expected exclusions maintained. --------- Signed-off-by: Charlie Doern <cdoern@redhat.com>
49 lines
1.4 KiB
Python
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.
from llama_stack_api import Conversation, ConversationItem, ConversationItemList
|
|
|
|
|
|
def test_conversation_model_defaults():
    """A Conversation built with explicit field values round-trips them unchanged."""
    fields = {
        "id": "conv_123456789",
        "created_at": 1234567890,
        "metadata": None,
        "object": "conversation",
    }
    conv = Conversation(**fields)

    # Every explicitly-passed value must survive pydantic validation as-is.
    assert conv.id == fields["id"]
    assert conv.object == fields["object"]
    assert conv.metadata is None
def test_openai_client_compatibility():
    """A message produced by the upstream OpenAI client validates as a ConversationItem."""
    from openai.types.conversations.message import Message
    from pydantic import TypeAdapter

    # Build the message exactly as the OpenAI Python client would represent it.
    client_message = Message(
        id="msg_123",
        content=[{"type": "input_text", "text": "Hello"}],
        role="user",
        status="in_progress",
        type="message",
        object="message",
    )

    # Round-trip through a plain dict, then validate against our item union type.
    payload = client_message.model_dump()
    item = TypeAdapter(ConversationItem).validate_python(payload)

    assert item.id == "msg_123"
    assert item.type == "message"
def test_conversation_item_list():
    """An empty ConversationItemList exposes the documented list defaults."""
    empty = ConversationItemList(data=[])

    # Fixed discriminator plus the caller-supplied (empty) payload.
    assert empty.object == "list"
    assert empty.data == []
    # Pagination cursors are absent and no further pages are advertised.
    assert empty.first_id is None and empty.last_id is None
    assert empty.has_more is False