Don't save full model for lora

This commit is contained in:
NanoCode012
2023-05-10 03:18:38 +09:00
committed by GitHub
parent 71a1f7f38c
commit cd2395987e

View File

@@ -228,11 +228,12 @@ def train(
     logging.info(
         f"Training Completed!!! Saving pre-trained model to {cfg.output_dir}"
     )
-    # TODO do we need this fix? https://huggingface.co/docs/accelerate/usage_guides/fsdp#saving-and-loading
-    trainer.save_model(cfg.output_dir)
+    if cfg.adapter == 'lora':
+        trainer.save_pretrained(cfg.output_dir)
+    else:
+        # TODO do we need this fix? https://huggingface.co/docs/accelerate/usage_guides/fsdp#saving-and-loading
+        trainer.save_model(cfg.output_dir)
 if __name__ == "__main__":