Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-10-16 23:03:49 +00:00)
commit 3c6555c408
parent eb572faf6f

    score batch

4 changed files with 34 additions and 10 deletions
@@ -113,7 +113,7 @@ async def run_main(host: str, port: int):
         input_rows=response.rows,
         scoring_functions=["equality"],
     )
-    cprint(f"scoring response={response}", "blue")
+    cprint(f"score response={response}", "blue")

     # test scoring batch using datasetio api
     scoring_client = ScoringClient(f"http://{host}:{port}")

@@ -121,7 +121,7 @@ async def run_main(host: str, port: int):
         dataset_id="test-dataset",
         scoring_functions=["equality"],
     )
-    cprint(f"scoring response={response}", "blue")
+    cprint(f"score_batch response={response}", "cyan")


 def main(host: str, port: int):
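For orientation, the two calls whose log labels are disambiguated above look roughly like the sketch below. Only the keyword arguments and the ScoringClient construction are taken verbatim from the hunk context; that the methods are named score and score_batch and are awaited is inferred from the routes touched later in this commit, and the rows argument stands in for the rows fetched earlier in run_main.

# Hedged sketch of the client flow exercised by run_main; ScoringClient is
# the test client constructed in the hunk above, and anything not visible
# in the diff is inferred rather than confirmed.
from termcolor import cprint

async def demo_scoring_calls(host: str, port: int, rows: list) -> None:
    scoring_client = ScoringClient(f"http://{host}:{port}")

    # Row-wise scoring: pass the previously fetched rows directly.
    response = await scoring_client.score(
        input_rows=rows,
        scoring_functions=["equality"],
    )
    cprint(f"score response={response}", "blue")

    # Batch scoring: reference a registered dataset by id (datasetio api).
    response = await scoring_client.score_batch(
        dataset_id="test-dataset",
        scoring_functions=["equality"],
    )
    cprint(f"score_batch response={response}", "cyan")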
@@ -18,7 +18,8 @@ ScoringResult = Dict[str, Any]

 @json_schema_type
 class ScoreBatchResponse(BaseModel):
-    dataset_id: str
+    dataset_id: Optional[str] = None
     results: Dict[str, ScoringResult]


 @json_schema_type
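With this change dataset_id is optional and defaults to None, so a batch-scoring response can omit it when no results dataset was written. A standalone re-creation of the model (the @json_schema_type decorator is left out) shows how a caller might branch on the field; the example values are made up.

# Self-contained sketch of the changed response model; the real class also
# carries the @json_schema_type decorator, omitted here.
from typing import Any, Dict, Optional
from pydantic import BaseModel

ScoringResult = Dict[str, Any]

class ScoreBatchResponse(BaseModel):
    # Populated only when the scored rows were saved back as a dataset.
    dataset_id: Optional[str] = None
    results: Dict[str, ScoringResult]

resp = ScoreBatchResponse(results={"equality": {"score_rows": [{"score": 1.0}]}})
if resp.dataset_id is None:
    print("results returned inline only")
else:
    print(f"scored rows saved to dataset {resp.dataset_id}")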
@@ -37,7 +38,10 @@ class Scoring(Protocol):

     @webmethod(route="/scoring/score_batch")
     async def score_batch(
-        self, dataset_id: str, scoring_functions: List[str]
+        self,
+        dataset_id: str,
+        scoring_functions: List[str],
+        save_results_dataset: bool = False,
     ) -> ScoreBatchResponse: ...

     @webmethod(route="/scoring/score")
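The widened signature adds a save_results_dataset flag, which pairs with the now-optional dataset_id on ScoreBatchResponse. The sketch below is a hypothetical in-memory provider written only to illustrate that pairing; it reuses the ScoreBatchResponse sketch above, and nothing in it reflects the actual llama-stack provider code, which is not part of these hunks.

# Hypothetical provider illustrating the extended protocol method; the
# scoring logic and the "<dataset_id>-scored" naming are invented.
import asyncio
from typing import Any, Dict, List

class InMemoryScoring:
    def __init__(self, rows_by_dataset: Dict[str, List[Dict[str, Any]]]):
        self.rows_by_dataset = rows_by_dataset

    async def score_batch(
        self,
        dataset_id: str,
        scoring_functions: List[str],
        save_results_dataset: bool = False,
    ) -> ScoreBatchResponse:
        rows = self.rows_by_dataset[dataset_id]
        results = {
            fn: {"score_rows": [{"score": 1.0} for _ in rows]}
            for fn in scoring_functions
        }
        # Only report a dataset id when asked to persist the scored rows.
        saved_id = f"{dataset_id}-scored" if save_results_dataset else None
        return ScoreBatchResponse(dataset_id=saved_id, results=results)

async def _demo() -> None:
    scoring = InMemoryScoring({"test-dataset": [{"expected": "x", "generated": "x"}]})
    resp = await scoring.score_batch(
        dataset_id="test-dataset",
        scoring_functions=["equality"],
        save_results_dataset=True,
    )
    print(resp.dataset_id, list(resp.results))

asyncio.run(_demo())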