Merge pull request #21 from NanoCode012/patch-1

Fix: Scheduler and optimizer condition
Wing Lian, committed by GitHub
2023-05-08 13:34:44 -04:00


@@ -104,8 +104,8 @@ def setup_trainer(cfg, train_dataset, eval_dataset, model, tokenizer):
         group_by_length=cfg.group_by_length,
         report_to="wandb" if cfg.use_wandb else None,
         run_name=cfg.wandb_run_id if cfg.use_wandb else None,
-        optim=cfg.optimizer if cfg.optimizer else None,
-        lr_scheduler_type=cfg.lr_scheduler if cfg.lr_scheduler not in ("one_cycle", "log_sweep") else "cosine",
+        optim=cfg.optimizer if cfg.optimizer else "adamw_hf",
+        lr_scheduler_type=cfg.lr_scheduler if cfg.lr_scheduler and cfg.lr_scheduler not in ("one_cycle", "log_sweep") else "cosine",
         weight_decay=cfg.weight_decay if cfg.weight_decay is not None else 0.0,
         **training_arguments_kwargs,
     )
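
Why the old condition was wrong: when cfg.lr_scheduler is unset (None), `None not in ("one_cycle", "log_sweep")` evaluates to True, so the ternary returned None instead of falling back to "cosine"; likewise, optim fell back to None rather than an explicit optimizer name (the new code supplies "adamw_hf"). A minimal sketch of the old vs. new scheduler condition, with illustrative helper names that are not part of the commit:

def pick_scheduler_old(lr_scheduler):
    # Old condition: an unset (None) scheduler slips through,
    # because `None not in (...)` is True.
    return lr_scheduler if lr_scheduler not in ("one_cycle", "log_sweep") else "cosine"

def pick_scheduler_new(lr_scheduler):
    # New condition: also require the value to be truthy,
    # so None (and "") fall back to "cosine".
    return lr_scheduler if lr_scheduler and lr_scheduler not in ("one_cycle", "log_sweep") else "cosine"

assert pick_scheduler_old(None) is None           # old: passes None through to TrainingArguments
assert pick_scheduler_new(None) == "cosine"       # new: falls back to the default
assert pick_scheduler_new("one_cycle") == "cosine"  # custom schedulers still map to "cosine" here
assert pick_scheduler_new("linear") == "linear"     # recognized values pass through unchanged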