From aefb2fc6815d2489e3b7e14f232888000f848d95 Mon Sep 17 00:00:00 2001
From: NanoCode012
Date: Sat, 10 Jun 2023 07:46:36 +0900
Subject: [PATCH] Fix backward compat for peft

---
 src/axolotl/utils/models.py | 10 ++++++++--
 1 file changed, 8 insertions(+), 2 deletions(-)

diff --git a/src/axolotl/utils/models.py b/src/axolotl/utils/models.py
index bbb72446a..433c96dee 100644
--- a/src/axolotl/utils/models.py
+++ b/src/axolotl/utils/models.py
@@ -140,12 +140,18 @@ def load_model(
             )
             replace_peft_model_with_int4_lora_model()
-        else:
-            from peft import prepare_model_for_kbit_training
 
     except Exception as err:
         logging.exception(err)
         raise err
 
+    try:
+        from peft import prepare_model_for_kbit_training
+    except ImportError:
+        # For backward compatibility
+        from peft import (
+            prepare_model_for_int8_training as prepare_model_for_kbit_training,
+        )
+
     model_kwargs = {}
     if cfg.adapter == "qlora" and cfg.load_in_4bit:
         model_kwargs["quantization_config"] = BitsAndBytesConfig(