mirror of
https://github.com/meta-llama/llama-stack.git
synced 2025-06-28 10:54:19 +00:00
# What does this PR do? This is not part of the official OpenAI API, but we'll use this for the logs UI. In order to support more filtering options, I'm adopting the newly introduced SQL store in place of the KV store. ## Test Plan Added integration/unit tests.
29 lines
701 B
Python
29 lines
701 B
Python
# Copyright (c) Meta Platforms, Inc. and affiliates.
|
|
# All rights reserved.
|
|
#
|
|
# This source code is licensed under the terms described in the LICENSE file in
|
|
# the root directory of this source tree.
|
|
|
|
from enum import Enum
|
|
from typing import Any
|
|
|
|
from pydantic import BaseModel
|
|
|
|
from llama_stack.schema_utils import json_schema_type
|
|
|
|
|
|
# Sort direction accepted by paginated list endpoints.
# Declared through the Enum functional API; members, names, and values are
# identical to the equivalent class-form declaration, so Order("asc") is
# Order.asc and Order.asc.value == "asc".
Order = Enum("Order", {"asc": "asc", "desc": "desc"})
Order.__doc__ = "Sort order for paginated list queries (ascending or descending)."
|
|
|
|
|
|
@json_schema_type
class PaginatedResponse(BaseModel):
    """A generic page of results in a simple, endpoint-agnostic shape.

    :param data: The list of items belonging to the current page
    :param has_more: Whether more items are available after this set
    """

    # Arbitrary JSON-like records; the concrete schema varies per endpoint.
    data: list[dict[str, Any]]
    # When True, the caller should request a subsequent page.
    has_more: bool
|