Mirror of https://github.com/meta-llama/llama-stack.git, synced 2025-08-03 01:03:59 +00:00
Remove check to parse either dict or pydantic model
parent 6659ed995a
commit dd349b2176

1 changed file with 3 additions and 12 deletions
@@ -392,19 +392,10 @@ class NvidiaPostTrainingAdapter(ModelRegistryHelper):
         # Handle LoRA-specific configuration
         if algorithm_config:
-            algorithm_config_dict = (
-                algorithm_config.model_dump() if hasattr(algorithm_config, "model_dump") else algorithm_config
-            )
-            if isinstance(algorithm_config_dict, dict) and algorithm_config_dict.get("type") == "LoRA":
-                warn_unsupported_params(algorithm_config_dict, supported_params["lora_config"], "LoRA config")
+            if algorithm_config.type == "LoRA":
+                warn_unsupported_params(algorithm_config, supported_params["lora_config"], "LoRA config")
                 job_config["hyperparameters"]["lora"] = {
-                    k: v
-                    for k, v in {
-                        "adapter_dim": algorithm_config_dict.get("adapter_dim"),
-                        "alpha": algorithm_config_dict.get("alpha"),
-                        "adapter_dropout": algorithm_config_dict.get("adapter_dropout"),
-                    }.items()
-                    if v is not None
+                    k: v for k, v in {"alpha": algorithm_config.alpha}.items() if v is not None
                 }
             else:
                 raise NotImplementedError(f"Unsupported algorithm config: {algorithm_config}")
 
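In plain terms, the adapter now assumes algorithm_config is a pydantic model and reads its fields directly, rather than first normalizing it to a dict via model_dump(). A minimal, self-contained sketch of the new behavior follows; LoRAConfig and build_lora_hyperparameters are hypothetical stand-ins for illustration, not llama-stack APIs:

    # Sketch of the post-change logic, assuming a pydantic algorithm config.
    from typing import Literal, Optional

    from pydantic import BaseModel


    class LoRAConfig(BaseModel):
        # Hypothetical stand-in for the real algorithm config model.
        type: Literal["LoRA"] = "LoRA"
        alpha: Optional[int] = None


    def build_lora_hyperparameters(algorithm_config: LoRAConfig) -> dict:
        # Mirrors the simplified adapter logic: direct attribute access,
        # keeping only the hyperparameters that were actually set.
        if algorithm_config.type != "LoRA":
            raise NotImplementedError(f"Unsupported algorithm config: {algorithm_config}")
        return {k: v for k, v in {"alpha": algorithm_config.alpha}.items() if v is not None}


    print(build_lora_hyperparameters(LoRAConfig(alpha=16)))  # {'alpha': 16}
    print(build_lora_hyperparameters(LoRAConfig()))          # {}

Since dict inputs are no longer accepted, passing one would now raise an AttributeError at the .type access instead of being silently normalized.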