workaround for transformers bug requiring do_sample for saving pretrained (#1206)

This commit is contained in:
Wing Lian
2024-01-25 11:34:41 -05:00
committed by GitHub
parent badda3783b
commit ba944e6554

View File

@@ -63,6 +63,8 @@ def train(
msg += " and peft_config..."
LOG.debug(msg)
model, peft_config = load_model(cfg, tokenizer, inference=cli_args.inference)
model.generation_config.do_sample = True
model_ref = None
if cfg.rl:
if cfg.adapter and not cfg.rl_adapter_ref_model: