diff --git a/src/axolotl/utils/schemas/peft.py b/src/axolotl/utils/schemas/peft.py
index a86de7822..5b90fb63f 100644
--- a/src/axolotl/utils/schemas/peft.py
+++ b/src/axolotl/utils/schemas/peft.py
@@ -209,6 +209,22 @@ class LoraConfig(BaseModel):
         data["lora_dropout"] = 0.0
         return data
 
+    @model_validator(mode="after")
+    def validate_lora_target_parameters_dropout(self):
+        """Reject non-zero lora_dropout when lora_target_parameters is set.
+
+        PEFT's ParamWrapper does not support dropout on targeted
+        parameters, so fail fast at config-validation time.
+        """
+        # A truthy float is already != 0.0, so a single truthiness check
+        # suffices; None and 0.0 both fall through without raising.
+        if self.lora_target_parameters and self.lora_dropout:
+            raise ValueError(
+                "lora_dropout must be 0 when lora_target_parameters is set. "
+                "PEFT's ParamWrapper does not support lora_dropout != 0."
+            )
+        return self
+
 
 class ReLoRAConfig(BaseModel):
     """ReLoRA configuration subset"""