From 4c500f583010ef0ddaf91dd66428d87c66dccd30 Mon Sep 17 00:00:00 2001 From: Wing Lian Date: Sat, 27 May 2023 08:43:48 -0400 Subject: [PATCH 1/2] checking for False is not sufficient for NoneType/unset configs --- src/axolotl/utils/validation.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/axolotl/utils/validation.py b/src/axolotl/utils/validation.py index 2fe7f99db..ebc50b5af 100644 --- a/src/axolotl/utils/validation.py +++ b/src/axolotl/utils/validation.py @@ -5,12 +5,12 @@ def validate_config(cfg): if cfg.adapter == "qlora": if cfg.merge_lora: # can't merge qlora if loaded in 8bit or 4bit - assert cfg.load_in_8bit is False - assert cfg.load_4bit is False + assert cfg.load_in_8bit is not True + assert cfg.load_4bit is not True assert cfg.load_in_4bit is False else: - assert cfg.load_in_8bit is False - assert cfg.load_4bit is False + assert cfg.load_in_8bit is not True + assert cfg.load_4bit is not True assert cfg.load_in_4bit is True if not cfg.load_in_8bit and cfg.adapter == "lora": logging.warning("We recommend setting `load_in_8bit: true` for LORA finetuning") From 4c906339f7f247c9d2b7ead2b2b61730c5971229 Mon Sep 17 00:00:00 2001 From: Wing Lian Date: Sat, 27 May 2023 08:49:43 -0400 Subject: [PATCH 2/2] fix auto linear modules for lora w/o any set already --- src/axolotl/utils/models.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/axolotl/utils/models.py b/src/axolotl/utils/models.py index 405c9e4b2..4f796dc29 100644 --- a/src/axolotl/utils/models.py +++ b/src/axolotl/utils/models.py @@ -364,7 +364,7 @@ def load_lora(model, cfg): PeftModel, ) - lora_target_modules = list(cfg.lora_target_modules) + lora_target_modules = list(cfg.lora_target_modules or []) if cfg.lora_target_linear: bits = None