"""
|
|
E2E tests for llama pretrain
|
|
"""
|
|
|
|
import logging
|
|
import os
|
|
|
|
import pytest
|
|
|
|
from axolotl.cli.args import TrainerCliArgs
|
|
from axolotl.common.datasets import load_datasets
|
|
from axolotl.train import train
|
|
from axolotl.utils.config import normalize_config
|
|
from axolotl.utils.dict import DictDefault
|
|
|
|
from .utils import check_model_output_exists
|
|
|
|
LOG = logging.getLogger("axolotl.tests.e2e")
|
|
os.environ["WANDB_DISABLED"] = "true"
|
|
|
|
|
|
class TestPretrainLlama:
    """
    Test case for Llama models with pretraining
    """

    @pytest.mark.parametrize(
        "sample_packing",
        [True, False],
    )
    def test_pretrain(self, temp_dir, sample_packing):
        # pylint: disable=duplicate-code
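        # Minimal pretraining config: a 68M-parameter llama trained for 5 steps
        # on the allenai/c4 (en) pretraining dataset. sample_packing is
        # parametrized above so both the packed and unpacked paths are exercised.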
        cfg = DictDefault(
            {
                "base_model": "JackFram/llama-68m",
                "tokenizer_type": "LlamaTokenizer",
                "flash_attention": True,
                "sequence_len": 1024,
                "sample_packing": sample_packing,
                "special_tokens": {
                    "unk_token": "<unk>",
                    "bos_token": "<s>",
                    "eos_token": "</s>",
                },
                "pretraining_dataset": [
                    {
                        "path": "allenai/c4",
                        "name": "en",
                        "type": "pretrain",
                    }
                ],
                "max_steps": 5,
                "num_epochs": 1,
                "micro_batch_size": 1,
                "gradient_accumulation_steps": 1,
                "val_set_size": 0.0,
                "output_dir": temp_dir,
                "learning_rate": 0.00001,
                "optimizer": "adamw_torch",
                "lr_scheduler": "cosine",
                "save_safetensors": True,
                "bf16": "auto",
            }
        )
        normalize_config(cfg)
        cli_args = TrainerCliArgs()
        dataset_meta = load_datasets(cfg=cfg, cli_args=cli_args)

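        # Run the short training loop and verify that model output was written.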
        train(cfg=cfg, dataset_meta=dataset_meta)
        check_model_output_exists(temp_dir, cfg)