Don't save full model for lora
This commit is contained in:
@@ -228,11 +228,12 @@ def train(
     logging.info(
         f"Training Completed!!! Saving pre-trained model to {cfg.output_dir}"
     )
-    # TODO do we need this fix? https://huggingface.co/docs/accelerate/usage_guides/fsdp#saving-and-loading
-    trainer.save_model(cfg.output_dir)
+    if cfg.adapter == 'lora':
+        trainer.save_pretrained(cfg.output_dir)
+    else:
+        # TODO do we need this fix? https://huggingface.co/docs/accelerate/usage_guides/fsdp#saving-and-loading
+        trainer.save_model(cfg.output_dir)
 
 
 if __name__ == "__main__":
Reference in New Issue
Block a user