Fix(model): RoPE linear layer incorrectly detected and added to LoRA target modules (#738)

* Fix(model): RoPE linear layer incorrectly detected and added to LoRA target modules

* fix: exclude layer instead
This commit is contained in:
NanoCode012
2023-10-19 11:13:20 +09:00
committed by GitHub
parent 992d57f20a
commit 440c3ab527

View File

@@ -507,7 +507,11 @@ def find_all_linear_names(model):
cls = (bnb.nn.Linear4bit, bnb.nn.Linear8bitLt, torch.nn.Linear, QuantLinear)
lora_module_names = set()
for name, module in model.named_modules():
if isinstance(module, cls) or "Linear" in module.__class__.__name__:
if (
isinstance(module, cls)
or "Linear" in module.__class__.__name__
and module.__class__.__name__ not in ("LlamaLinearScalingRotaryEmbedding",)
):
names = name.split(".")
lora_module_names.add(names[0] if len(names) == 1 else names[-1])