From 33bbe9b2226efaee046eec8e1d42ec47e540a0b7 Mon Sep 17 00:00:00 2001
From: sunny
Date: Thu, 7 Nov 2024 10:52:52 -0500
Subject: [PATCH] test

---
 test.yml | 75 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 75 insertions(+)
 create mode 100644 test.yml

diff --git a/test.yml b/test.yml
new file mode 100644
index 000000000..c4254ea20
--- /dev/null
+++ b/test.yml
@@ -0,0 +1,75 @@
+base_model: NousResearch/Meta-Llama-3.1-8B
+
+plugins:
+  - axolotl.integrations.liger.LigerPlugin
+liger_rope: true
+liger_rms_norm: true
+liger_glu_activation: true
+liger_fused_linear_cross_entropy: true
+
+strict: false
+
+datasets:
+  - path: mhenrichsen/alpaca_2k_test
+    type: alpaca
+
+dataset_prepared_path: last_run_prepared
+val_set_size: 0.02
+output_dir: ./outputs/out
+
+sequence_len: 4096
+sample_packing: true
+pad_to_sequence_len: true
+
+wandb_project:
+wandb_entity:
+wandb_watch:
+wandb_name:
+wandb_log_model:
+
+gradient_accumulation_steps: 4
+micro_batch_size: 2
+num_epochs: 1
+optimizer: adamw_torch
+lr_scheduler: cosine
+learning_rate: 2e-5
+
+train_on_inputs: false
+group_by_length: false
+bf16: auto
+fp16:
+tf32: false
+
+gradient_checkpointing: true
+gradient_checkpointing_kwargs:
+  use_reentrant: false
+early_stopping_patience:
+resume_from_checkpoint:
+logging_steps: 1
+xformers_attention:
+flash_attention: true
+
+warmup_steps: 100
+evals_per_epoch: 2
+eval_table_size:
+saves_per_epoch: 1
+debug:
+deepspeed:
+weight_decay: 0.0
+fsdp:
+  - full_shard
+  - auto_wrap
+fsdp_config:
+  fsdp_limit_all_gathers: true
+  fsdp_sync_module_states: true
+  fsdp_offload_params: true
+  fsdp_use_orig_params: false
+  fsdp_cpu_ram_efficient_loading: true
+  fsdp_auto_wrap_policy: TRANSFORMER_BASED_WRAP
+  fsdp_transformer_layer_cls_to_wrap: LlamaDecoderLayer
+  fsdp_state_dict_type: FULL_STATE_DICT
+  fsdp_sharding_strategy: FULL_SHARD
+  fsdp_backward_prefetch: BACKWARD_PRE
+special_tokens:
+  pad_token: <|finetune_right_pad_id|>
+  eos_token: <|eot_id|>
\ No newline at end of file