From 4657cb7177cf5ca77bfa93e4dc9f6f18cbaf5f1f Mon Sep 17 00:00:00 2001
From: NanoCode012
Date: Wed, 25 Feb 2026 14:51:42 +0700
Subject: [PATCH] fix: add dropout check when using lora target param

---
 src/axolotl/utils/schemas/peft.py | 13 +++++++++++++
 1 file changed, 13 insertions(+)

diff --git a/src/axolotl/utils/schemas/peft.py b/src/axolotl/utils/schemas/peft.py
index a86de7822..5b90fb63f 100644
--- a/src/axolotl/utils/schemas/peft.py
+++ b/src/axolotl/utils/schemas/peft.py
@@ -209,6 +209,19 @@ class LoraConfig(BaseModel):
             data["lora_dropout"] = 0.0
         return data
 
+    @model_validator(mode="after")
+    def validate_lora_target_parameters_dropout(self):
+        if (
+            self.lora_target_parameters
+            and self.lora_dropout is not None
+            and self.lora_dropout != 0.0
+        ):
+            raise ValueError(
+                "lora_dropout must be 0 when lora_target_parameters is set. "
+                "PEFT's ParamWrapper does not support lora_dropout != 0."
+            )
+        return self
+
 
 class ReLoRAConfig(BaseModel):
     """ReLoRA configuration subset"""