move folder

This commit is contained in:
Xi Yan 2024-10-05 11:57:21 -07:00
parent 6234dd97d5
commit 041634192a
9 changed files with 18 additions and 6 deletions

View file

@@ -42,12 +42,21 @@ class EvaluationClient(Evals):
async def run_main(host: str, port: int): async def run_main(host: str, port: int):
client = EvaluationClient(f"http://{host}:{port}") client = EvaluationClient(f"http://{host}:{port}")
# CustomDataset
# response = await client.run_evals(
# "Llama3.1-8B-Instruct",
# "mmlu-simple-eval-en",
# "mmlu",
# )
# cprint(f"evaluate response={response}", "green")
# Eleuther Eval
response = await client.run_evals( response = await client.run_evals(
"Llama3.1-8B-Instruct", "Llama3.1-8B-Instruct",
"mmlu-simple-eval-en", "PLACEHOLDER_DATASET_NAME",
"mmlu", "mmlu",
) )
cprint(f"evaluate response={response}", "green") cprint(response.metrics["metrics_table"], "red")
def main(host: str, port: int): def main(host: str, port: int):

View file

@@ -109,7 +109,7 @@ async def run_main(host: str, port: int, stream: bool):
cprint(f"User>{message.content}", "green") cprint(f"User>{message.content}", "green")
iterator = client.chat_completion( iterator = client.chat_completion(
model="Llama3.1-8B-Instruct", model="Llama3.1-8B-Instruct",
messages=[message, UserMessage(content="write me 3 sentence about the sun.")], messages=[message],
stream=stream, stream=stream,
) )
async for log in EventLogger().log(iterator): async for log in EventLogger().log(iterator):

View file

@@ -146,5 +146,5 @@ class MMLUTask(BaseTask):
def aggregate_results(self, eval_results): def aggregate_results(self, eval_results):
return EvaluateResponse( return EvaluateResponse(
metrics={"score": sum(eval_results) / len(eval_results)} metrics={"score": str(sum(eval_results) / len(eval_results))}
) )

View file

@@ -33,8 +33,8 @@ def available_providers() -> List[ProviderSpec]:
pip_packages=[ pip_packages=[
"lm-eval", "lm-eval",
], ],
module="llama_stack.providers.adapters.evals.eleuther", module="llama_stack.providers.impls.third_party.evals.eleuther",
config_class="llama_stack.providers.adapters.evals.eleuther.EleutherEvalsImplConfig", config_class="llama_stack.providers.impls.third_party.evals.eleuther.EleutherEvalsImplConfig",
api_dependencies=[ api_dependencies=[
Api.inference, Api.inference,
], ],

View file

@@ -15,6 +15,9 @@ api_providers:
evals: evals:
provider_type: eleuther provider_type: eleuther
config: {} config: {}
# evals:
# provider_type: meta-reference
# config: {}
inference: inference:
providers: providers:
- meta-reference - meta-reference