From 7496db524bcc7ebc30e4bda96f9d42f86e3b6a45 Mon Sep 17 00:00:00 2001
From: NanoCode012
Date: Fri, 23 May 2025 12:31:02 +0700
Subject: [PATCH] fix: force access to lr & weight decay in case not provided
 to early error

---
 src/axolotl/core/trainer_builder/base.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/axolotl/core/trainer_builder/base.py b/src/axolotl/core/trainer_builder/base.py
index c3d5faa3c..41bf06498 100644
--- a/src/axolotl/core/trainer_builder/base.py
+++ b/src/axolotl/core/trainer_builder/base.py
@@ -237,8 +237,8 @@ class TrainerBuilderBase(abc.ABC):
         if self.cfg.optimizer in custom_supported_optimizers:
             # Common optimizer kwargs
             optimizer_kwargs = {
-                "lr": training_args_kwargs.get("learning_rate"),
-                "weight_decay": training_args_kwargs.get("weight_decay"),
+                "lr": training_args_kwargs["learning_rate"],
+                "weight_decay": training_args_kwargs["weight_decay"],
             }
 
             # Adam-specific kwargs