Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-12-11 11:50:41 +00:00)
async call in separate thread
commit adb768f827 (parent ae43044a57)
5 changed files with 72 additions and 55 deletions
@@ -28,14 +28,17 @@ class MetaReferenceEvalsImpl(Evals):
     async def run_evals(
         self,
         model: str,
-        dataset: str,
         task: str,
+        dataset: Optional[str] = None,
     ) -> EvaluateResponse:
         cprint(f"model={model}, dataset={dataset}, task={task}", "red")
+        if not dataset:
+            raise ValueError("dataset must be specified for mete-reference evals")
 
         dataset = DatasetRegistry.get_dataset(dataset)
         dataset.load()
-        task_impl = TaskRegistry.get_task(task)(dataset)
+
+        task_impl = TaskRegistry.get_task(task)(dataset)
         x1 = task_impl.preprocess()
 
         # TODO: replace w/ batch inference & async return eval job
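
The commit title, "async call in separate thread", describes offloading an async call to its own thread so the caller's event loop is not blocked. The following is a minimal sketch of that general pattern, not the repo's actual implementation (which lives in the other changed files); the names fake_run_evals and run_in_new_loop are hypothetical.

import asyncio
from concurrent.futures import ThreadPoolExecutor

async def fake_run_evals(model: str, task: str) -> str:
    # Hypothetical stand-in for the real eval coroutine.
    await asyncio.sleep(0.1)
    return f"evaluated {model} on {task}"

def run_in_new_loop(coro):
    # asyncio.run() creates a fresh event loop in the worker thread.
    return asyncio.run(coro)

async def main() -> None:
    loop = asyncio.get_running_loop()
    with ThreadPoolExecutor(max_workers=1) as pool:
        # Offload the coroutine to the thread pool and await its result
        # without blocking the current event loop.
        result = await loop.run_in_executor(
            pool, run_in_new_loop, fake_run_evals("Llama3.1-8B-Instruct", "mmlu")
        )
    print(result)

if __name__ == "__main__":
    asyncio.run(main())

Under the same assumption, an alternative is asyncio.run_coroutine_threadsafe, which submits a coroutine to an event loop that is already running on another thread instead of spinning up a new loop per call.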