Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-12-17 13:32:35 +00:00)
Commit e2a0dce8ad: Merge branch 'main' into post_training_v3
286 changed files with 13314 additions and 4467 deletions
@@ -12,6 +12,7 @@ from typing import Any, Dict, List, Optional, Protocol, Union
 from llama_models.schema_utils import json_schema_type, webmethod

 from pydantic import BaseModel, Field
 from typing_extensions import Annotated

 from llama_models.llama3.api.datatypes import *  # noqa: F403
+from llama_stack.apis.common.job_types import JobStatus
@@ -80,6 +81,11 @@ class QATFinetuningConfig(BaseModel):
     group_size: int


+AlgorithmConfig = Annotated[
+    Union[LoraFinetuningConfig, QATFinetuningConfig], Field(discriminator="type")
+]
+
+
 @json_schema_type
 class PostTrainingJobLogStream(BaseModel):
     """Stream of logs from a finetuning job."""
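The new AlgorithmConfig alias is a Pydantic discriminated union: the "type" field of the incoming payload selects which config model validates it. Below is a minimal, self-contained sketch of that pattern (assuming Pydantic v2); the models are simplified stand-ins for the repo's LoraFinetuningConfig and QATFinetuningConfig, and every field other than the "type" discriminator is an illustrative assumption rather than the actual schema.

# A minimal sketch of the discriminated-union pattern used by AlgorithmConfig.
# The models are simplified stand-ins; only the "type" discriminator mirrors
# the real definitions.
from typing import Literal, Optional, Union

from pydantic import BaseModel, Field
from typing_extensions import Annotated


class LoraFinetuningConfig(BaseModel):
    type: Literal["LoRA"] = "LoRA"
    rank: int = 8      # illustrative field, not the actual schema
    alpha: int = 16    # illustrative field, not the actual schema


class QATFinetuningConfig(BaseModel):
    type: Literal["QAT"] = "QAT"
    quantizer_name: str = "int4"  # illustrative field, not the actual schema
    group_size: int = 32          # illustrative default


# Same shape as the alias added in the hunk above: the value of "type" in the
# incoming data decides which model is used for validation.
AlgorithmConfig = Annotated[
    Union[LoraFinetuningConfig, QATFinetuningConfig], Field(discriminator="type")
]


class FineTuneRequest(BaseModel):
    # Hypothetical wrapper; mirrors how supervised_fine_tune accepts the config
    # in the next hunk.
    algorithm_config: Optional[AlgorithmConfig] = None


req = FineTuneRequest(algorithm_config={"type": "QAT", "group_size": 64})
assert isinstance(req.algorithm_config, QATFinetuningConfig)

Naming the union once keeps the discriminator in a single place: the next hunk can shrink the supervised_fine_tune signature to Optional[AlgorithmConfig], and a future algorithm config only needs to be added to this alias.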
@@ -166,9 +172,7 @@ class PostTraining(Protocol):
             description="Model descriptor from `llama model list`",
         ),
         checkpoint_dir: Optional[str] = None,
-        algorithm_config: Optional[
-            Union[LoraFinetuningConfig, QATFinetuningConfig]
-        ] = None,
+        algorithm_config: Optional[AlgorithmConfig] = None,
     ) -> PostTrainingJob: ...

     @webmethod(route="/post-training/preference-optimize", method="POST")
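Since these types are decorated with json_schema_type and exposed through webmethod routes, the discriminated union also shows up in the generated API schema as a oneOf with a discriminator mapping keyed on "type", rather than a bare union. Continuing the sketch above (and still assuming Pydantic v2), one way to inspect that:

# Continuing the sketch above: dump the JSON schema for the discriminated
# union to see the oneOf plus discriminator mapping it produces.
import json

from pydantic import TypeAdapter

print(json.dumps(TypeAdapter(AlgorithmConfig).json_schema(), indent=2))
# The output contains a "oneOf" over the two configs and a "discriminator"
# entry whose mapping routes "LoRA" and "QAT" to their respective schemas.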