Fix auto-detection of linear modules for LoRA when no target modules are set (guard `cfg.lora_target_modules` with `or []` so it defaults to an empty list instead of failing on None)

This commit is contained in:
Wing Lian
2023-05-27 08:49:43 -04:00
parent 4c500f5830
commit 4c906339f7

View File

@@ -364,7 +364,7 @@ def load_lora(model, cfg):
PeftModel,
)
lora_target_modules = list(cfg.lora_target_modules)
lora_target_modules = list(cfg.lora_target_modules or [])
if cfg.lora_target_linear:
bits = None