From 4afb2656b30f0889bf7115dc642e2dd1e0ab13a9 Mon Sep 17 00:00:00 2001
From: sunny
Date: Thu, 17 Oct 2024 11:23:47 -0400
Subject: [PATCH] added yml for testing issue 1947

---
 examples/llama-3/fft-1b-fsdp.yml | 47 ++++++++++++++++++++++++++++++++
 1 file changed, 47 insertions(+)
 create mode 100644 examples/llama-3/fft-1b-fsdp.yml

diff --git a/examples/llama-3/fft-1b-fsdp.yml b/examples/llama-3/fft-1b-fsdp.yml
new file mode 100644
index 000000000..66b305420
--- /dev/null
+++ b/examples/llama-3/fft-1b-fsdp.yml
@@ -0,0 +1,47 @@
+base_model: meta-llama/Llama-3.2-1B
+
+save_safetensors: true
+
+datasets:
+  - path: teknium/GPT4-LLM-Cleaned
+    type: alpaca
+
+dataset_prepared_path: ./last_run_prepared
+
+output_dir: ./outputs/fft-out
+sequence_len: 2048
+
+gradient_accumulation_steps: 1
+micro_batch_size: 1
+num_epochs: 1
+optimizer: adamw_torch
+learning_rate: 2e-5
+
+bf16: auto
+fp16:
+tf32: false
+
+logging_steps: 10
+xformers_attention:
+flash_attention: true
+
+warmup_steps: 10
+evals_per_epoch: 2
+save_steps: 2
+max_steps: 5
+weight_decay: 0.0
+
+fsdp:
+  - full_shard
+  - auto_wrap
+fsdp_config:
+  fsdp_limit_all_gathers: true
+  fsdp_sync_module_states: true
+  fsdp_offload_params: false
+  fsdp_use_orig_params: true
+  fsdp_cpu_ram_efficient_loading: false
+  fsdp_auto_wrap_policy: TRANSFORMER_BASED_WRAP
+  fsdp_transformer_layer_cls_to_wrap: LlamaDecoderLayer
+  fsdp_state_dict_type: FULL_STATE_DICT
+  fsdp_sharding_strategy: FULL_SHARD
+  fsdp_backward_prefetch: BACKWARD_PRE