Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-06-27 18:50:41 +00:00)
fix: Don't require efficiency_config for torchtune (#2104)
# What does this PR do?
Revert a change that mistakenly forced efficiency_config on torchtune
provider users.
```
fix: Don't require efficiency_config for torchtune
It was enforced by mistake when 0751a960a5 was merged.

The other asserts made sense in that the code was, presumably, written to
always expect a non-None value. That is not the case for efficiency_config.
```
Signed-off-by: Ihar Hrachyshka <ihar.hrachyshka@gmail.com>
This commit is contained in:
parent 7377a5c83e
commit c219a74fa0
1 changed file with 0 additions and 3 deletions
```diff
@@ -39,7 +39,6 @@ from llama_stack.apis.datasets import Datasets
 from llama_stack.apis.post_training import (
     Checkpoint,
     DataConfig,
-    EfficiencyConfig,
     LoraFinetuningConfig,
     OptimizerConfig,
     QATFinetuningConfig,
@@ -90,8 +89,6 @@ class LoraFinetuningSingleDevice:
     ) -> None:
         assert isinstance(training_config.data_config, DataConfig), "DataConfig must be initialized"
 
-        assert isinstance(training_config.efficiency_config, EfficiencyConfig), "EfficiencyConfig must be initialized"
-
         self.job_uuid = job_uuid
         self.training_config = training_config
         if not isinstance(algorithm_config, LoraFinetuningConfig):
```
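For context, a minimal sketch of why the removed assert was a problem. The dataclasses below are simplified stand-ins, not the real llama_stack types: efficiency_config is an optional field, so callers may legitimately leave it as None, but the assert rejected such configs at construction time.

```python
from dataclasses import dataclass
from typing import Optional


@dataclass
class DataConfig:
    dataset_id: str


@dataclass
class EfficiencyConfig:
    enable_activation_checkpointing: bool = False


@dataclass
class TrainingConfig:
    data_config: Optional[DataConfig] = None
    efficiency_config: Optional[EfficiencyConfig] = None  # optional by design


def validate(training_config: TrainingConfig) -> None:
    # Kept by this commit: the recipe always reads data_config, so it must be set.
    assert isinstance(training_config.data_config, DataConfig), "DataConfig must be initialized"

    # Removed by this commit: efficiency_config may legitimately stay None,
    # so asserting its type here rejected otherwise valid configurations.
    # assert isinstance(training_config.efficiency_config, EfficiencyConfig), \
    #     "EfficiencyConfig must be initialized"


# A config that omits efficiency_config now passes validation.
validate(TrainingConfig(data_config=DataConfig(dataset_id="my-dataset")))
```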