always drop samples that are too long (#452)

This commit is contained in:
Wing Lian
2023-08-21 16:43:33 -04:00
committed by GitHub
parent 5a1985ba24
commit 50682a3c06

View File

@@ -284,15 +284,15 @@ def disable_datasets_caching():
def process_datasets_for_packing(cfg, train_dataset, eval_dataset):
    """Drop overlong samples and, when sample packing is enabled, add position ids.

    Samples whose tokenized length exceeds ``cfg.sequence_len`` are always
    filtered out (not only when packing), so truncation issues surface early.

    Args:
        cfg: config object providing ``sequence_len`` and ``sample_packing``.
        train_dataset: HF ``datasets.Dataset`` of tokenized training samples.
        eval_dataset: optional HF ``datasets.Dataset`` for evaluation; may be
            ``None``/empty, in which case it is returned unchanged.

    Returns:
        Tuple ``(train_dataset, eval_dataset)`` with the filtering (and, if
        packing, the ``position_ids`` column) applied.
    """
    drop_long = partial(drop_long_seq, sequence_len=cfg.sequence_len)
    # Hoist the worker count so every filter/map call uses the same value.
    num_proc = os.cpu_count()

    # Always remove samples that are too long, regardless of packing mode.
    train_dataset = train_dataset.filter(drop_long, num_proc=num_proc)
    if eval_dataset:
        eval_dataset = eval_dataset.filter(drop_long, num_proc=num_proc)

    if cfg.sample_packing:
        # Packing concatenates samples, so each sample needs position_ids
        # that restart at 0 per original sequence.
        train_dataset = train_dataset.map(add_position_ids, num_proc=num_proc)
        if eval_dataset:
            eval_dataset = eval_dataset.map(add_position_ids, num_proc=num_proc)
    return train_dataset, eval_dataset