Merge pull request #23 from NanoCode012/patch-1

Fix: Save adapter for lora
Wing Lian, committed via GitHub on 2023-05-09 15:05:58 -04:00

@@ -228,8 +228,11 @@ def train(
     logging.info(
         f"Training Completed!!! Saving pre-trained model to {cfg.output_dir}"
     )
     # TODO do we need this fix? https://huggingface.co/docs/accelerate/usage_guides/fsdp#saving-and-loading
-    trainer.save_model(cfg.output_dir)
+    model.save_pretrained(cfg.output_dir)
+    # trainer.save_model(cfg.output_dir)  # TODO this may be needed for deepspeed to work? need to review another time
 
 if __name__ == "__main__":
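
Context for the change, as a minimal sketch: transformers.Trainer has no save_pretrained method, and Trainer.save_model on a PEFT-wrapped model would serialize the full wrapped model rather than just the LoRA adapter. Calling save_pretrained on the PEFT model itself writes only the small adapter files, which is what "save adapter for lora" refers to. The base model name, target modules, and output path below are illustrative assumptions, not taken from this repo.

    from peft import LoraConfig, PeftModel, get_peft_model
    from transformers import AutoModelForCausalLM

    # Wrap a base model with a LoRA adapter (illustrative config).
    base = AutoModelForCausalLM.from_pretrained("facebook/opt-125m")
    lora_cfg = LoraConfig(r=8, lora_alpha=16, target_modules=["q_proj", "v_proj"])
    model = get_peft_model(base, lora_cfg)

    # ... training happens here ...

    # Writes only adapter_config.json and the adapter weights (a few MB),
    # not the full base model checkpoint.
    model.save_pretrained("out/lora-adapter")

    # To reuse: load the base model again, then attach the saved adapter.
    base = AutoModelForCausalLM.from_pretrained("facebook/opt-125m")
    model = PeftModel.from_pretrained(base, "out/lora-adapter")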