fix: force direct access to lr & weight decay so missing values raise an early error

NanoCode012
2025-05-23 12:31:02 +07:00
parent 255acd3da2
commit 7496db524b


@@ -237,8 +237,8 @@ class TrainerBuilderBase(abc.ABC):
         if self.cfg.optimizer in custom_supported_optimizers:
             # Common optimizer kwargs
             optimizer_kwargs = {
-                "lr": training_args_kwargs.get("learning_rate"),
-                "weight_decay": training_args_kwargs.get("weight_decay"),
+                "lr": training_args_kwargs["learning_rate"],
+                "weight_decay": training_args_kwargs["weight_decay"],
             }
             # Adam-specific kwargs
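
For context, a minimal sketch (not from the repository) of the behavior this change relies on: dict.get() silently returns None for a missing key, so a missing learning rate or weight decay would only surface later inside the optimizer, whereas direct indexing raises a KeyError as soon as the kwargs are built. The training_args_kwargs dict below is hypothetical.

# Hypothetical kwargs dict missing "weight_decay"
training_args_kwargs = {"learning_rate": 2e-5}

wd = training_args_kwargs.get("weight_decay")  # -> None, failure surfaces much later
try:
    wd = training_args_kwargs["weight_decay"]  # -> KeyError raised immediately at build time
except KeyError as exc:
    print(f"missing config value: {exc}")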