Save adapter for lora

Author: NanoCode012
Date: 2023-05-10 01:08:22 +09:00
Committed by: GitHub
Parent: 02c59832a3
Commit: 71a1f7f38c


@@ -230,6 +230,9 @@ def train(
     )
 
     # TODO do we need this fix? https://huggingface.co/docs/accelerate/usage_guides/fsdp#saving-and-loading
     trainer.save_model(cfg.output_dir)
+    if cfg.adapter == 'lora':
+        trainer.save_pretrained(cfg.output_dir)
+
 
 if __name__ == "__main__":
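
Note on the added lines: transformers.Trainer does not define a save_pretrained method (that API lives on the model object, e.g. PEFT's PeftModel), so trainer.save_pretrained(...) would raise an AttributeError at runtime. Below is a minimal sketch of the apparent intent under that assumption: saving only the LoRA adapter weights through the trained model. The helper name and the isinstance guard are illustrative, not the repo's actual code; only the cfg fields mirror the diff.

    from peft import PeftModel

    def save_outputs(trainer, cfg):
        # Save the full model/trainer state as before.
        trainer.save_model(cfg.output_dir)

        if cfg.adapter == 'lora':
            model = trainer.model
            # PeftModel.save_pretrained writes only the adapter weights plus
            # adapter_config.json, which can later be reloaded with
            # PeftModel.from_pretrained(base_model, cfg.output_dir).
            if isinstance(model, PeftModel):
                model.save_pretrained(cfg.output_dir)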