[Evals API][2/n] datasets / datasetio meta-reference implementation (#288)

* skeleton dataset / datasetio

* dataset datasetio

* config

* address comments

* delete dataset_utils

* address comments

* naming fix
Xi Yan 2024-10-22 16:12:16 -07:00 committed by GitHub
parent 8a01b9e40c
commit 821810657f
16 changed files with 452 additions and 8 deletions

View file

@@ -50,11 +50,5 @@ class Datasets(Protocol):
         dataset_identifier: str,
     ) -> Optional[DatasetDefWithProvider]: ...
 
-    @webmethod(route="/datasets/delete")
-    async def delete_dataset(
-        self,
-        dataset_identifier: str,
-    ) -> None: ...
-
     @webmethod(route="/datasets/list", method="GET")
     async def list_datasets(self) -> List[DatasetDefWithProvider]: ...
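
For orientation, a minimal sketch (not part of this commit) of the `Datasets` surface this hunk leaves behind: lookup and listing remain, while `delete_dataset` is removed, which is why the tests later in this diff warn about the missing unregister API. The `datasets` handle here is assumed to be any `Datasets` implementation, such as the `DatasetsRoutingTable` added below.

```python
# Hypothetical usage of the trimmed-down surface; `datasets` is any Datasets
# implementation (e.g. the DatasetsRoutingTable added later in this diff).
async def show_datasets(datasets: Datasets) -> None:
    for d in await datasets.list_datasets():
        print(d.identifier, d.provider_id)

    # get_dataset returns None for an unknown identifier instead of raising
    if await datasets.get_dataset(dataset_identifier="test_dataset") is None:
        print("test_dataset is not registered")
```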

View file

@@ -14,11 +14,12 @@ from llama_stack.providers.datatypes import *  # noqa: F403
 from llama_stack.apis.models import *  # noqa: F403
 from llama_stack.apis.shields import *  # noqa: F403
 from llama_stack.apis.memory_banks import *  # noqa: F403
+from llama_stack.apis.datasets import *  # noqa: F403
+from llama_stack.apis.datasetio import DatasetIO
 from llama_stack.apis.inference import Inference
 from llama_stack.apis.memory import Memory
 from llama_stack.apis.safety import Safety
 
 LLAMA_STACK_BUILD_CONFIG_VERSION = "2"
 LLAMA_STACK_RUN_CONFIG_VERSION = "2"
@@ -30,18 +31,21 @@ RoutableObject = Union[
     ModelDef,
     ShieldDef,
     MemoryBankDef,
+    DatasetDef,
 ]
 
 RoutableObjectWithProvider = Union[
     ModelDefWithProvider,
     ShieldDefWithProvider,
     MemoryBankDefWithProvider,
+    DatasetDefWithProvider,
 ]
 
 RoutedProtocol = Union[
     Inference,
     Safety,
     Memory,
+    DatasetIO,
 ]

View file

@@ -35,6 +35,10 @@ def builtin_automatically_routed_apis() -> List[AutoRoutedApiInfo]:
             routing_table_api=Api.memory_banks,
             router_api=Api.memory,
         ),
+        AutoRoutedApiInfo(
+            routing_table_api=Api.datasets,
+            router_api=Api.datasetio,
+        ),
     ]

View file

@@ -12,6 +12,8 @@ from llama_stack.providers.datatypes import *  # noqa: F403
 from llama_stack.distribution.datatypes import *  # noqa: F403
 
 from llama_stack.apis.agents import Agents
+from llama_stack.apis.datasetio import DatasetIO
+from llama_stack.apis.datasets import Datasets
 from llama_stack.apis.inference import Inference
 from llama_stack.apis.inspect import Inspect
 from llama_stack.apis.memory import Memory
@@ -38,6 +40,8 @@ def api_protocol_map() -> Dict[Api, Any]:
         Api.safety: Safety,
         Api.shields: Shields,
         Api.telemetry: Telemetry,
+        Api.datasets: Datasets,
+        Api.datasetio: DatasetIO,
     }

View file

@@ -8,6 +8,7 @@ from typing import Any
 from llama_stack.distribution.datatypes import *  # noqa: F403
 
 from .routing_tables import (
+    DatasetsRoutingTable,
     MemoryBanksRoutingTable,
     ModelsRoutingTable,
     ShieldsRoutingTable,
@@ -23,6 +24,7 @@ async def get_routing_table_impl(
         "memory_banks": MemoryBanksRoutingTable,
         "models": ModelsRoutingTable,
         "shields": ShieldsRoutingTable,
+        "datasets": DatasetsRoutingTable,
     }
 
     if api.value not in api_to_tables:
         raise ValueError(f"API {api.value} not found in router map")
@@ -33,12 +35,13 @@ async def get_routing_table_impl(
 
 async def get_auto_router_impl(api: Api, routing_table: RoutingTable, _deps) -> Any:
-    from .routers import InferenceRouter, MemoryRouter, SafetyRouter
+    from .routers import DatasetIORouter, InferenceRouter, MemoryRouter, SafetyRouter
 
     api_to_routers = {
         "memory": MemoryRouter,
         "inference": InferenceRouter,
         "safety": SafetyRouter,
+        "datasetio": DatasetIORouter,
     }
 
     if api.value not in api_to_routers:
         raise ValueError(f"API {api.value} not found in router map")

View file

@@ -6,11 +6,13 @@
 
 from typing import Any, AsyncGenerator, Dict, List
 
+from llama_stack.apis.datasetio.datasetio import DatasetIO
+
 from llama_stack.distribution.datatypes import RoutingTable
 from llama_stack.apis.memory import *  # noqa: F403
 from llama_stack.apis.inference import *  # noqa: F403
 from llama_stack.apis.safety import *  # noqa: F403
+from llama_stack.apis.datasetio import *  # noqa: F403
 
 
 class MemoryRouter(Memory):
@@ -160,3 +162,33 @@ class SafetyRouter(Safety):
             messages=messages,
             params=params,
         )
+
+
+class DatasetIORouter(DatasetIO):
+    def __init__(
+        self,
+        routing_table: RoutingTable,
+    ) -> None:
+        self.routing_table = routing_table
+
+    async def initialize(self) -> None:
+        pass
+
+    async def shutdown(self) -> None:
+        pass
+
+    async def get_rows_paginated(
+        self,
+        dataset_id: str,
+        rows_in_page: int,
+        page_token: Optional[str] = None,
+        filter_condition: Optional[str] = None,
+    ) -> PaginatedRowsResult:
+        return await self.routing_table.get_provider_impl(
+            dataset_id
+        ).get_rows_paginated(
+            dataset_id=dataset_id,
+            rows_in_page=rows_in_page,
+            page_token=page_token,
+            filter_condition=filter_condition,
+        )
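
The router is a pure pass-through: it asks the routing table which provider owns `dataset_id` and forwards the call unchanged. A rough sketch of that equivalence (not part of this commit; `routing_table` and the registered `test_dataset` are assumed):

```python
# Illustrative only: the router resolves the owning provider and delegates,
# so routing through it matches calling the provider directly.
async def demo(routing_table: RoutingTable) -> None:
    router = DatasetIORouter(routing_table=routing_table)
    via_router = await router.get_rows_paginated(
        dataset_id="test_dataset", rows_in_page=3
    )

    provider = routing_table.get_provider_impl("test_dataset")
    direct = await provider.get_rows_paginated(
        dataset_id="test_dataset", rows_in_page=3
    )
    assert via_router.rows == direct.rows
```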

View file

@@ -11,6 +11,7 @@ from llama_models.llama3.api.datatypes import *  # noqa: F403
 from llama_stack.apis.models import *  # noqa: F403
 from llama_stack.apis.shields import *  # noqa: F403
 from llama_stack.apis.memory_banks import *  # noqa: F403
+from llama_stack.apis.datasets import *  # noqa: F403
 
 from llama_stack.distribution.datatypes import *  # noqa: F403
@@ -27,6 +28,10 @@ async def register_object_with_provider(obj: RoutableObject, p: Any) -> None:
         await p.register_shield(obj)
     elif api == Api.memory:
         await p.register_memory_bank(obj)
+    elif api == Api.datasetio:
+        await p.register_dataset(obj)
+    else:
+        raise ValueError(f"Unknown API {api} for registering object with provider")
 
 
 Registry = Dict[str, List[RoutableObjectWithProvider]]
@@ -80,6 +85,16 @@ class CommonRoutingTableImpl(RoutingTable):
                 add_objects(memory_banks)
+
+            elif api == Api.datasetio:
+                p.dataset_store = self
+                datasets = await p.list_datasets()
+
+                # do in-memory updates due to pesky Annotated unions
+                for d in datasets:
+                    d.provider_id = pid
+
+                add_objects(datasets)
 
     async def shutdown(self) -> None:
         for p in self.impls_by_provider_id.values():
             await p.shutdown()
@@ -137,6 +152,7 @@ class CommonRoutingTableImpl(RoutingTable):
             raise ValueError(f"Provider `{obj.provider_id}` not found")
 
         p = self.impls_by_provider_id[obj.provider_id]
         await register_object_with_provider(obj, p)
+
         if obj.identifier not in self.registry:
@@ -190,3 +206,19 @@ class MemoryBanksRoutingTable(CommonRoutingTableImpl, MemoryBanks):
         self, memory_bank: MemoryBankDefWithProvider
     ) -> None:
         await self.register_object(memory_bank)
+
+
+class DatasetsRoutingTable(CommonRoutingTableImpl, Datasets):
+    async def list_datasets(self) -> List[DatasetDefWithProvider]:
+        objects = []
+        for objs in self.registry.values():
+            objects.extend(objs)
+        return objects
+
+    async def get_dataset(
+        self, dataset_identifier: str
+    ) -> Optional[DatasetDefWithProvider]:
+        return self.get_object_by_identifier(dataset_identifier)
+
+    async def register_dataset(self, dataset_def: DatasetDefWithProvider) -> None:
+        await self.register_object(dataset_def)
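
Taken together with `register_object_with_provider` above, registration flows in two steps: the routing table validates `provider_id`, forwards the `DatasetDefWithProvider` to that provider's `register_dataset`, and then records the object in its in-memory registry. A hedged sketch, with identifiers borrowed from the configs elsewhere in this diff:

```python
# Sketch: "meta0" matches the datasetio provider in the run config below;
# `datasets_table` is assumed to be a resolved DatasetsRoutingTable.
async def demo(datasets_table: "DatasetsRoutingTable") -> None:
    dataset = DatasetDefWithProvider(
        identifier="my_eval_set",
        provider_id="meta0",
        url=URL(uri="https://example.com/rows.csv"),
        columns_schema={},
    )
    # validates provider_id, calls provider.register_dataset, updates the registry
    await datasets_table.register_dataset(dataset)
    assert await datasets_table.get_dataset("my_eval_set") is not None
```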

View file

@@ -10,6 +10,8 @@ from typing import Any, List, Optional, Protocol
 from llama_models.schema_utils import json_schema_type
 from pydantic import BaseModel, Field
 
+from llama_stack.apis.datasets import DatasetDef
+
 from llama_stack.apis.memory_banks import MemoryBankDef
 from llama_stack.apis.models import ModelDef
@@ -22,12 +24,14 @@ class Api(Enum):
     safety = "safety"
     agents = "agents"
     memory = "memory"
+    datasetio = "datasetio"
 
     telemetry = "telemetry"
 
     models = "models"
     shields = "shields"
     memory_banks = "memory_banks"
+    datasets = "datasets"
 
     # built-in API
     inspect = "inspect"
@@ -51,6 +55,12 @@ class MemoryBanksProtocolPrivate(Protocol):
     async def register_memory_bank(self, memory_bank: MemoryBankDef) -> None: ...
+
+
+class DatasetsProtocolPrivate(Protocol):
+    async def list_datasets(self) -> List[DatasetDef]: ...
+
+    async def register_dataset(self, dataset_def: DatasetDef) -> None: ...
 
 
 @json_schema_type
 class ProviderSpec(BaseModel):
     api: Api

View file

@@ -0,0 +1,18 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.

from .config import MetaReferenceDatasetIOConfig


async def get_provider_impl(
    config: MetaReferenceDatasetIOConfig,
    _deps,
):
    from .datasetio import MetaReferenceDatasetIOImpl

    impl = MetaReferenceDatasetIOImpl(config)
    await impl.initialize()
    return impl
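
This is the standard inline-provider entry point: the distribution resolver imports the `module` named in the provider spec (registered later in this diff) and awaits `get_provider_impl(config, deps)`. A sketch of doing the same by hand, e.g. for quick local experimentation:

```python
# Sketch: manually instantiating the provider the way the resolver would.
import asyncio

from llama_stack.providers.impls.meta_reference.datasetio import (
    MetaReferenceDatasetIOConfig,
    get_provider_impl,
)


async def main() -> None:
    impl = await get_provider_impl(MetaReferenceDatasetIOConfig(), {})
    print(await impl.list_datasets())  # [] until a dataset is registered


asyncio.run(main())
```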

View file

@@ -0,0 +1,9 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.

from llama_stack.apis.datasetio import *  # noqa: F401, F403


class MetaReferenceDatasetIOConfig(BaseModel): ...

View file

@@ -0,0 +1,142 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.
import base64
import io
from abc import ABC, abstractmethod
from dataclasses import dataclass
from typing import List, Optional
from urllib.parse import unquote

import pandas

from llama_models.llama3.api.datatypes import *  # noqa: F403
from llama_stack.apis.datasetio import *  # noqa: F403
from llama_stack.providers.datatypes import DatasetsProtocolPrivate

# helper for parsing data: URLs, shared with the memory provider
from llama_stack.providers.utils.memory.vector_store import parse_data_url

from .config import MetaReferenceDatasetIOConfig


class BaseDataset(ABC):
    def __init__(self, *args, **kwargs) -> None:
        super().__init__(*args, **kwargs)

    @abstractmethod
    def __len__(self) -> int:
        raise NotImplementedError()

    @abstractmethod
    def __getitem__(self, idx):
        raise NotImplementedError()

    @abstractmethod
    def load(self):
        raise NotImplementedError()


@dataclass
class DatasetInfo:
    dataset_def: DatasetDef
    dataset_impl: BaseDataset


class PandasDataframeDataset(BaseDataset):
    def __init__(self, dataset_def: DatasetDef, *args, **kwargs) -> None:
        super().__init__(*args, **kwargs)
        self.dataset_def = dataset_def
        self.df = None

    def __len__(self) -> int:
        assert self.df is not None, "Dataset not loaded. Please call .load() first"
        return len(self.df)

    def __getitem__(self, idx):
        if isinstance(idx, slice):
            return self.df.iloc[idx].to_dict(orient="records")
        else:
            return self.df.iloc[idx].to_dict()

    def load(self) -> None:
        if self.df is not None:
            return

        # TODO: more robust support w/ data url
        if self.dataset_def.url.uri.endswith(".csv"):
            df = pandas.read_csv(self.dataset_def.url.uri)
        elif self.dataset_def.url.uri.endswith(".xlsx"):
            df = pandas.read_excel(self.dataset_def.url.uri)
        elif self.dataset_def.url.uri.startswith("data:"):
            parts = parse_data_url(self.dataset_def.url.uri)
            data = parts["data"]
            if parts["is_base64"]:
                data = base64.b64decode(data)
            else:
                data = unquote(data)
                encoding = parts["encoding"] or "utf-8"
                data = data.encode(encoding)

            mime_type = parts["mimetype"]
            mime_category = mime_type.split("/")[0]
            data_bytes = io.BytesIO(data)

            if mime_category == "text":
                df = pandas.read_csv(data_bytes)
            else:
                df = pandas.read_excel(data_bytes)
        else:
            raise ValueError(f"Unsupported file type: {self.dataset_def.url}")

        self.df = df


class MetaReferenceDatasetIOImpl(DatasetIO, DatasetsProtocolPrivate):
    def __init__(self, config: MetaReferenceDatasetIOConfig) -> None:
        self.config = config
        # local registry for keeping track of datasets within the provider
        self.dataset_infos = {}

    async def initialize(self) -> None: ...

    async def shutdown(self) -> None: ...

    async def register_dataset(
        self,
        dataset_def: DatasetDef,
    ) -> None:
        dataset_impl = PandasDataframeDataset(dataset_def)
        self.dataset_infos[dataset_def.identifier] = DatasetInfo(
            dataset_def=dataset_def,
            dataset_impl=dataset_impl,
        )

    async def list_datasets(self) -> List[DatasetDef]:
        return [i.dataset_def for i in self.dataset_infos.values()]

    async def get_rows_paginated(
        self,
        dataset_id: str,
        rows_in_page: int,
        page_token: Optional[str] = None,
        filter_condition: Optional[str] = None,
    ) -> PaginatedRowsResult:
        dataset_info = self.dataset_infos.get(dataset_id)
        if dataset_info is None:
            raise ValueError(f"Unknown dataset: {dataset_id}")
        dataset_info.dataset_impl.load()

        if page_token is None:
            next_page_token = 0
        else:
            next_page_token = int(page_token)

        start = next_page_token
        if rows_in_page == -1:
            # -1 means "return all remaining rows"
            end = len(dataset_info.dataset_impl)
        else:
            end = min(start + rows_in_page, len(dataset_info.dataset_impl))

        rows = dataset_info.dataset_impl[start:end]

        return PaginatedRowsResult(
            rows=rows,
            total_count=len(rows),
            next_page_token=str(end),
        )
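
The pagination contract is deliberately simple: `page_token` is just a stringified row offset, `next_page_token` is the end index of the returned slice, and `rows_in_page=-1` returns everything from the offset onward. A worked sketch (the `impl` handle and registered dataset are assumed):

```python
# Assumes `impl` is an initialized MetaReferenceDatasetIOImpl with
# "test_dataset" registered (see the test helper later in this diff).
async def demo(impl: "MetaReferenceDatasetIOImpl") -> None:
    first = await impl.get_rows_paginated(dataset_id="test_dataset", rows_in_page=3)
    assert first.next_page_token == "3"  # end index of the returned slice

    rest = await impl.get_rows_paginated(
        dataset_id="test_dataset",
        rows_in_page=-1,                   # drain everything from the offset on
        page_token=first.next_page_token,  # resume at row 3
    )
```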

View file

@@ -0,0 +1,22 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.
from typing import List

from llama_stack.distribution.datatypes import *  # noqa: F403


def available_providers() -> List[ProviderSpec]:
    return [
        InlineProviderSpec(
            api=Api.datasetio,
            provider_type="meta-reference",
            pip_packages=["pandas"],
            module="llama_stack.providers.impls.meta_reference.datasetio",
            config_class="llama_stack.providers.impls.meta_reference.datasetio.MetaReferenceDatasetIOConfig",
            api_dependencies=[],
        ),
    ]

View file

@@ -0,0 +1,5 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.

View file

@@ -0,0 +1,4 @@
providers:
  - provider_id: test-meta
    provider_type: meta-reference
    config: {}
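
Beyond plain `.csv`/`.xlsx` URLs, `PandasDataframeDataset.load()` above also accepts `data:` URLs, which is handy for tests that should not hit the network. A hedged sketch of registering an inline CSV (the `datasets_impl` handle and identifiers are illustrative; `test-meta` matches the example config above):

```python
# Builds a base64 data: URL so PandasDataframeDataset.load() takes the
# text/csv -> read_csv branch; no network access needed.
import base64


async def demo(datasets_impl: Datasets) -> None:
    csv_text = "input_query,expected_answer\n2+2?,4\n"
    uri = "data:text/csv;base64," + base64.b64encode(csv_text.encode()).decode()

    dataset = DatasetDefWithProvider(
        identifier="inline_csv",
        provider_id="test-meta",  # matches the example config above
        url=URL(uri=uri),
        columns_schema={},
    )
    await datasets_impl.register_dataset(dataset)
```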

View file

@@ -0,0 +1,109 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.
import os

import pytest
import pytest_asyncio

from llama_stack.apis.datasetio import *  # noqa: F403
from llama_stack.distribution.datatypes import *  # noqa: F403
from llama_stack.providers.tests.resolver import resolve_impls_for_test

# How to run this test:
#
# 1. Ensure you have a conda environment with the right dependencies installed.
#    This is a bit tricky since it depends on the provider you are testing. On
#    top of that you need `pytest` and `pytest-asyncio` installed.
#
# 2. Copy and modify provider_config_example.yaml depending on the provider you are testing.
#
# 3. Run:
#
# ```bash
# PROVIDER_ID=<your_provider> \
#   PROVIDER_CONFIG=provider_config.yaml \
#   pytest -s llama_stack/providers/tests/datasetio/test_datasetio.py \
#   --tb=short --disable-warnings
# ```


@pytest_asyncio.fixture(scope="session")
async def datasetio_settings():
    impls = await resolve_impls_for_test(
        Api.datasetio,
    )
    return {
        "datasetio_impl": impls[Api.datasetio],
        "datasets_impl": impls[Api.datasets],
    }


async def register_dataset(datasets_impl: Datasets):
    dataset = DatasetDefWithProvider(
        identifier="test_dataset",
        provider_id=os.environ["PROVIDER_ID"],
        url=URL(
            uri="https://openaipublic.blob.core.windows.net/simple-evals/mmlu.csv",
        ),
        columns_schema={},
    )
    await datasets_impl.register_dataset(dataset)


@pytest.mark.asyncio
async def test_datasets_list(datasetio_settings):
    # NOTE: this test assumes you are starting from a clean state, but so far
    # we don't have an unregister API unfortunately, so be careful
    datasets_impl = datasetio_settings["datasets_impl"]
    response = await datasets_impl.list_datasets()
    assert isinstance(response, list)
    assert len(response) == 0


@pytest.mark.asyncio
async def test_datasets_register(datasetio_settings):
    # NOTE: this test assumes you are starting from a clean state, but so far
    # we don't have an unregister API unfortunately, so be careful
    datasets_impl = datasetio_settings["datasets_impl"]
    await register_dataset(datasets_impl)

    response = await datasets_impl.list_datasets()
    assert isinstance(response, list)
    assert len(response) == 1

    # registering the same dataset id again should not create a duplicate entry
    await register_dataset(datasets_impl)
    response = await datasets_impl.list_datasets()
    assert isinstance(response, list)
    assert len(response) == 1
    assert response[0].identifier == "test_dataset"


@pytest.mark.asyncio
async def test_get_rows_paginated(datasetio_settings):
    datasetio_impl = datasetio_settings["datasetio_impl"]
    datasets_impl = datasetio_settings["datasets_impl"]
    await register_dataset(datasets_impl)

    response = await datasetio_impl.get_rows_paginated(
        dataset_id="test_dataset",
        rows_in_page=3,
    )
    assert isinstance(response.rows, list)
    assert len(response.rows) == 3
    assert response.next_page_token == "3"

    # iterate over all rows
    response = await datasetio_impl.get_rows_paginated(
        dataset_id="test_dataset",
        rows_in_page=10,
        page_token=response.next_page_token,
    )
    assert isinstance(response.rows, list)
    assert len(response.rows) == 10
    assert response.next_page_token == "13"
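
A natural follow-up (not in this commit) would be a test for the `rows_in_page=-1` path, sketched here under the same fixtures:

```python
@pytest.mark.asyncio
async def test_get_all_rows(datasetio_settings):
    # Hypothetical test: -1 should return every remaining row in one page.
    datasetio_impl = datasetio_settings["datasetio_impl"]
    datasets_impl = datasetio_settings["datasets_impl"]
    await register_dataset(datasets_impl)

    response = await datasetio_impl.get_rows_paginated(
        dataset_id="test_dataset",
        rows_in_page=-1,
    )
    assert isinstance(response.rows, list)
    assert response.total_count == len(response.rows)
```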

View file

@@ -0,0 +1,52 @@
version: '2'
built_at: '2024-10-08T17:40:45.325529'
image_name: local
docker_image: null
conda_env: local
apis:
- shields
- safety
- agents
- models
- memory
- memory_banks
- inference
- datasets
- datasetio
providers:
  datasetio:
  - provider_id: meta0
    provider_type: meta-reference
    config: {}
  inference:
  - provider_id: tgi0
    provider_type: remote::tgi
    config:
      url: http://127.0.0.1:5009
  memory:
  - provider_id: meta-reference
    provider_type: meta-reference
    config: {}
  agents:
  - provider_id: meta-reference
    provider_type: meta-reference
    config:
      persistence_store:
        namespace: null
        type: sqlite
        db_path: ~/.llama/runtime/kvstore.db
  telemetry:
  - provider_id: meta-reference
    provider_type: meta-reference
    config: {}
  safety:
  - provider_id: meta-reference
    provider_type: meta-reference
    config:
      llama_guard_shield:
        model: Llama-Guard-3-1B
        excluded_categories: []
        disable_input_check: false
        disable_output_check: false
      prompt_guard_shield:
        model: Prompt-Guard-86M