Add layers_to_transform for lora_config (#1118)

Author: xzuyn
Date: 2024-01-15 21:29:55 -05:00
Committed by: GitHub
Parent: 9cd27b2f91
Commit: 8487b97cf3
4 changed files with 23 additions and 1 deletion


@@ -257,6 +257,11 @@ def validate_config(cfg):
     if cfg.adapter == "lora" and (cfg.flash_attn_fuse_qkv or cfg.flash_attn_fuse_mlp):
         raise ValueError("Fused modules are not supported with LoRA")
 
+    if cfg.adapter and cfg.peft_layers_to_transform and cfg.unfrozen_parameters:
+        raise ValueError(
+            "`unfrozen_parameters` used with `peft_layers_to_transform` can have unexpected behavior."
+        )
+
     if cfg.relora_steps:
         if cfg.adapter not in ("lora", "qlora"):
             raise ValueError("cfg.adapter must be lora or qlora to use ReLoRA")


@@ -733,6 +733,7 @@ def load_lora(model, cfg, inference=False):
         r=cfg.lora_r,
         lora_alpha=cfg.lora_alpha,
         target_modules=lora_target_modules,
+        layers_to_transform=cfg.peft_layers_to_transform,
         lora_dropout=cfg.lora_dropout,
         fan_in_fan_out=cfg.lora_fan_in_fan_out,
         modules_to_save=cfg.lora_modules_to_save if cfg.lora_modules_to_save else None,
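The new kwarg is forwarded straight to PEFT's LoraConfig. For reference, a standalone sketch of the equivalent PEFT call; the values are illustrative, not from this commit:

from peft import LoraConfig

# Standalone equivalent: LoRA adapters are attached only to the listed
# decoder-layer indices (a single int is also accepted).
lora_config = LoraConfig(
    r=8,
    lora_alpha=16,
    target_modules=["q_proj", "v_proj"],
    layers_to_transform=[0, 1, 2, 3],  # only these layers receive LoRA weights
    lora_dropout=0.05,
    task_type="CAUSAL_LM",
)

When layers_to_transform is set, PEFT matches the indices against the module path; for models whose layers are not named in the usual layers.<idx> form, the companion layers_pattern option controls that matching.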