Mirror of https://github.com/meta-llama/llama-stack.git, synced 2025-12-10 19:43:16 +00:00
Commit 3cbe3a72e8 (parent 4f07aca309): mvp

10 changed files with 230 additions and 76 deletions
@@ -3,17 +3,22 @@
 #
 # This source code is licensed under the terms described in the LICENSE file in
 # the root directory of this source tree.
-from .datasets import CustomDataset
+from .datasets import CustomDataset, HFDataset
 
 # TODO: make this into a config based registry
 DATASETS_REGISTRY = {
-    "mmlu_eval": CustomDataset(
+    "mmlu-simple-eval-en": CustomDataset(
         name="mmlu_eval",
         url="https://openaipublic.blob.core.windows.net/simple-evals/mmlu.csv",
     ),
+    "mmmu-accounting": HFDataset(
+        name="mmlu_eval",
+        url="hf://hellaswag?split=validation&trust_remote_code=True",
+    ),
 }
 
 
 def get_dataset(dataset_id: str):
     # get dataset concrete dataset implementation
-    return DATASETS_REGISTRY[dataset_id]
+    dataset = DATASETS_REGISTRY[dataset_id]
+    dataset.load()
+    return dataset
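A quick sketch of how the reworked registry is meant to be consumed. The import path below is hypothetical (this hunk does not name the file), but the ids and the eager-load behavior come straight from the diff above:

    # Sketch only; "registry_module" stands in for wherever DATASETS_REGISTRY lives.
    from registry_module import get_dataset  # hypothetical import path

    ds = get_dataset("mmlu-simple-eval-en")  # dict lookup, then ds.load() before returning
    for row in ds:  # BaseDataset.__iter__ delegates to the loaded HF Dataset
        print(row)
        break

Calling load() inside get_dataset means lookups are no longer free, but every dataset a caller receives is ready to iterate.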
@@ -4,23 +4,35 @@
 # This source code is licensed under the terms described in the LICENSE file in
 # the root directory of this source tree.
 
+from abc import ABC, abstractmethod
+from urllib.parse import parse_qs, urlparse
+
 import pandas
 from datasets import Dataset, load_dataset
 
 
-class BaseDataset:
+class BaseDataset(ABC):
     def __init__(self, name: str):
         self.dataset = None
         self.dataset_id = name
         self.type = self.__class__.__name__
 
     def __iter__(self):
         return iter(self.dataset)
 
+    @abstractmethod
     def load(self):
         pass
 
 
 class CustomDataset(BaseDataset):
     def __init__(self, name, url):
         super().__init__(name)
         self.url = url
 
     def load(self):
         if self.dataset:
             return
         df = pandas.read_csv(self.url)
         self.dataset = Dataset.from_pandas(df)
 
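A reviewer-style note on the BaseDataset change above: declaring load() abstract turns a forgotten load() implementation from a silent no-op into an instantiation-time error. A minimal sketch, assuming the classes are importable from this module (path hypothetical):

    # BaseDataset is now an ABC with an abstract method, so it cannot be instantiated.
    BaseDataset("foo")  # TypeError: Can't instantiate abstract class BaseDataset ...

    # Concrete subclasses are fine; CustomDataset.load() reads a CSV into an HF Dataset.
    ds = CustomDataset(
        name="mmlu_eval",
        url="https://openaipublic.blob.core.windows.net/simple-evals/mmlu.csv",
    )
    ds.load()  # pandas.read_csv(self.url) -> Dataset.from_pandas(df), cached on self.dataset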
@@ -28,13 +40,18 @@ class CustomDataset(BaseDataset):
 class HFDataset(BaseDataset):
     def __init__(self, name, url):
         super().__init__(name)
         # URL following OpenAI's evals - hf://hendrycks_test?name=business_ethics&split=validation
         self.url = url
-        parsed = urlparse(url)
-        query = parse_qs(parsed.query)
-        query = {k: v[0] for k, v in query.items()}
 
     def load(self):
         if self.dataset:
             return
 
+        parsed = urlparse(self.url)
+
         if parsed.scheme != "hf":
-            raise ValueError(f"Unknown HF dataset: {url}")
+            raise ValueError(f"Unknown HF dataset: {self.url}")
 
+        query = parse_qs(parsed.query)
+        query = {k: v[0] for k, v in query.items()}
+        path = parsed.netloc
+        self.dataset = load_dataset(path, **query)
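To make the hf:// URL convention concrete, here is what the parsing in HFDataset.load() yields for the registry entry added in the first hunk; this is plain urllib behavior and easy to check in a REPL:

    from urllib.parse import parse_qs, urlparse

    parsed = urlparse("hf://hellaswag?split=validation&trust_remote_code=True")
    parsed.scheme   # "hf"        -> the scheme check passes
    parsed.netloc   # "hellaswag" -> the path handed to load_dataset()
    query = {k: v[0] for k, v in parse_qs(parsed.query).items()}
    query           # {"split": "validation", "trust_remote_code": "True"}

    # So load() effectively calls:
    #   load_dataset("hellaswag", split="validation", trust_remote_code="True")

Worth noting: parse_qs returns strings, so trust_remote_code arrives as the string "True" rather than a boolean; it is truthy either way, but not the type the datasets API documents.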