address PR feedback

Wing Lian
2023-06-10 14:21:43 -04:00
parent eea2731a5e
commit 0c6f928601
5 changed files with 9 additions and 8 deletions


@@ -505,10 +505,10 @@ def encode_pretraining(tokenizer, max_tokens, examples):
     return ret
 
 
-def load_pretraining_dataset(path, tokenizer, max_tokens=2048):
+def load_pretraining_dataset(path, tokenizer, max_tokens=2048, seed=42):
     encode = functools.partial(encode_pretraining, tokenizer, max_tokens)
     dataset = load_dataset(path, streaming=True, split="train")
-    dataset = dataset.shuffle(seed=42, buffer_size=10_000)
+    dataset = dataset.shuffle(seed=seed, buffer_size=10_000)
     # TODO dynamically figure out which columns/features to remove
     dataset = dataset.map(encode, batched=True, remove_columns=["text", "meta"])
     return dataset
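
For context, the hunk above threads a seed parameter through load_pretraining_dataset instead of hard-coding 42 in the shuffle call; since the dataset is loaded with streaming=True, shuffle reorders examples within a 10,000-example buffer rather than globally. A minimal usage sketch, assuming the function lives in axolotl.utils.data and using a placeholder tokenizer checkpoint and dataset path (none of these specifics are part of this commit):

# Hypothetical caller; the module path, model name, and dataset path are assumptions.
from transformers import AutoTokenizer

from axolotl.utils.data import load_pretraining_dataset

tokenizer = AutoTokenizer.from_pretrained("huggyllama/llama-7b")

# The default stays seed=42, so existing callers are unaffected; passing
# another value yields a different buffered shuffle order over the stream.
dataset = load_pretraining_dataset(
    "togethercomputer/RedPajama-Data-1T", tokenizer, max_tokens=2048, seed=1234
)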


@@ -1,7 +1,6 @@
 """Module containing the Trainer class and related functions"""
 
 import importlib
-import logging
 import math
 import os
 import sys
@@ -232,7 +231,6 @@ def setup_trainer(cfg, train_dataset, eval_dataset, model, tokenizer):
         callbacks.append(SavePeftModelCallback)
 
     if hasattr(model, "use_bettertransformer") and model.use_bettertransformer is True:
-        logging.info("Setting up SaveBetterTransformerModelCallback.")
         callbacks.append(SaveBetterTransformerModelCallback)
 
     data_collator_kwargs = {
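
The second trainer hunk drops the logging.info call from the BetterTransformer branch, which presumably leaves import logging unused in that module, hence the removal in the first hunk; the callback itself is still registered whenever the model was converted with BetterTransformer. For readers unfamiliar with the pattern, a minimal sketch of a save-time TrainerCallback in the transformers API (an illustrative stand-in, not the actual SaveBetterTransformerModelCallback implementation):

# Illustrative skeleton only; axolotl's real callback does more than this.
from transformers import TrainerCallback, TrainerControl, TrainerState, TrainingArguments


class ExampleSaveCallback(TrainerCallback):
    """Hook that runs each time the Trainer writes a checkpoint."""

    def on_save(self, args: TrainingArguments, state: TrainerState, control: TrainerControl, **kwargs):
        # on_save fires after a checkpoint is saved; post-save logic for the
        # persisted model (e.g. re-exporting it in another format) goes here.
        return control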