workaround for transformers bug requiring do_sample for saving pretrained (#1206)
This commit is contained in:
@@ -63,6 +63,8 @@ def train(
|
||||
msg += " and peft_config..."
|
||||
LOG.debug(msg)
|
||||
model, peft_config = load_model(cfg, tokenizer, inference=cli_args.inference)
|
||||
model.generation_config.do_sample = True
|
||||
|
||||
model_ref = None
|
||||
if cfg.rl:
|
||||
if cfg.adapter and not cfg.rl_adapter_ref_model:
|
||||
|
||||
Reference in New Issue
Block a user