From 7659c001aa58c4bc8095e351dbe43af5c5ab84c0 Mon Sep 17 00:00:00 2001 From: Wing Lian Date: Sun, 10 Mar 2024 20:49:45 -0400 Subject: [PATCH] support for rslora (#1387) [skip ci] --- src/axolotl/utils/config/models/input/v0_4_1/__init__.py | 1 + src/axolotl/utils/models.py | 2 ++ 2 files changed, 3 insertions(+) diff --git a/src/axolotl/utils/config/models/input/v0_4_1/__init__.py b/src/axolotl/utils/config/models/input/v0_4_1/__init__.py index 3236bc643..11983acd9 100644 --- a/src/axolotl/utils/config/models/input/v0_4_1/__init__.py +++ b/src/axolotl/utils/config/models/input/v0_4_1/__init__.py @@ -179,6 +179,7 @@ class LoraConfig(BaseModel): peft_layers_to_transform: Optional[List[int]] = None peft: Optional[PeftConfig] = None peft_use_dora: Optional[bool] = None + peft_use_rslora: Optional[bool] = None lora_on_cpu: Optional[bool] = None gptq: Optional[bool] = None diff --git a/src/axolotl/utils/models.py b/src/axolotl/utils/models.py index 36c9c17e3..53201c996 100644 --- a/src/axolotl/utils/models.py +++ b/src/axolotl/utils/models.py @@ -1055,6 +1055,8 @@ def load_lora(model, cfg, inference=False, config_only=False): lora_config_kwargs["init_lora_weights"] = "loftq" if cfg.peft_use_dora: lora_config_kwargs["use_dora"] = cfg.peft_use_dora + if cfg.peft_use_rslora: + lora_config_kwargs["use_rslora"] = cfg.peft_use_rslora lora_config = LoraConfig( r=cfg.lora_r,