upgrade peft v0.17.0 and support for lora target_parameters (#3006)

Author: Wing Lian
Date: 2025-08-02 20:24:04 -04:00
Committed by: GitHub
Parent: 10946afae7
Commit: deac7b18a1
3 changed files with 4 additions and 1 deletion


@@ -76,6 +76,7 @@ def load_lora(
     config_only: bool = False,
 ) -> tuple[PreTrainedModel | PeftModel | PeftMixedModel | None, PeftConfig | None]:
     lora_target_modules = cfg.lora_target_modules or []
+    lora_target_parameters = cfg.lora_target_parameters or []
     if cfg.lora_target_linear:
         linear_names = find_all_linear_names(model)
@@ -106,6 +107,7 @@ def load_lora(
         r=cfg.lora_r,
         lora_alpha=cfg.lora_alpha,
         target_modules=lora_target_modules,
+        target_parameters=lora_target_parameters,
         layers_to_transform=cfg.peft_layers_to_transform,
         layers_pattern=cfg.peft_layers_pattern,
         lora_dropout=cfg.lora_dropout,

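The plumbing above forwards the new config value into peft's LoraConfig. In peft v0.17.0, target_parameters complements target_modules by attaching LoRA to nn.Parameter objects addressed by dotted name, which covers weights that are not exposed as nn.Linear modules (for example fused MoE expert weights). A minimal standalone sketch of that peft API, assuming peft >= 0.17.0; the checkpoint name and parameter path are illustrative, not taken from this commit:

# Minimal sketch, assuming peft >= 0.17.0 (where target_parameters landed).
# The checkpoint name and parameter path below are hypothetical placeholders.
from peft import LoraConfig, get_peft_model
from transformers import AutoModelForCausalLM

model = AutoModelForCausalLM.from_pretrained("some-org/some-moe-model")

peft_config = LoraConfig(
    r=16,
    lora_alpha=32,
    target_modules=["q_proj", "v_proj"],  # matches nn.Module instances by name
    # Matches nn.Parameter objects by dotted path, e.g. fused expert weights
    # stored as plain parameters rather than nn.Linear modules (hypothetical):
    target_parameters=["feed_forward.experts.gate_up_proj"],
)
model = get_peft_model(model, peft_config)
model.print_trainable_parameters()  # adapters now cover both kinds of target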

@@ -54,6 +54,7 @@ class LoraConfig(BaseModel):
     lora_alpha: int | None = None
     lora_fan_in_fan_out: bool | None = None
     lora_target_modules: str | list[str] | None = None
+    lora_target_parameters: str | list[str] | None = None
     lora_target_linear: bool | None = Field(
         default=None,
         json_schema_extra={"description": "If true, will target all linear modules"},
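Because the new field mirrors lora_target_modules, it accepts either a single dotted name or a list of them. A small sketch of just that validation behavior, with the pydantic model trimmed to the new field and illustrative parameter paths:

# Minimal sketch of the schema change above, trimmed to the new field.
from pydantic import BaseModel

class LoraConfig(BaseModel):
    lora_target_parameters: str | list[str] | None = None

# Both a single name and a list of names validate (paths are illustrative):
print(LoraConfig(lora_target_parameters="experts.gate_up_proj"))
print(LoraConfig(lora_target_parameters=["experts.gate_up_proj", "experts.down_proj"]))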