fix: add lora_dropout check when lora_target_parameters is set

This commit is contained in:
NanoCode012
2026-02-25 14:51:42 +07:00
parent eb13054672
commit 4657cb7177

View File

@@ -209,6 +209,19 @@ class LoraConfig(BaseModel):
data["lora_dropout"] = 0.0
return data
@model_validator(mode="after")
def validate_lora_target_parameters_dropout(self):
    """Reject configs combining ``lora_target_parameters`` with nonzero dropout.

    PEFT's ParamWrapper does not support ``lora_dropout != 0``, so a config
    that sets ``lora_target_parameters`` must keep dropout at 0 (or unset).

    Returns:
        The validated model instance (pydantic ``mode="after"`` convention).

    Raises:
        ValueError: if both ``lora_target_parameters`` and a nonzero
            ``lora_dropout`` are configured.
    """
    # A truthy numeric dropout is necessarily nonzero, so the extra
    # `!= 0.0` comparison is redundant and has been dropped; None and
    # 0/0.0 are both falsy and therefore allowed.
    if self.lora_target_parameters and self.lora_dropout:
        raise ValueError(
            "lora_dropout must be 0 when lora_target_parameters is set. "
            "PEFT's ParamWrapper does not support lora_dropout != 0."
        )
    return self
class ReLoRAConfig(BaseModel):
"""ReLoRA configuration subset"""