Compare commits

...

4 Commits

Author SHA1 Message Date
sunny
66a1e209e3 changed yml 2024-10-18 10:46:36 -04:00
sunny
dba033cb5b fixed yml for issue1947 testing 2024-10-17 12:31:11 -04:00
sunny
6a32e9a0df added yml for testing issue 1947 2024-10-17 11:26:32 -04:00
sunny
4afb2656b3 added yml for testing issue 1947 2024-10-17 11:23:47 -04:00

View File

@@ -0,0 +1,50 @@
# Fine-tuning configuration — key names match the Axolotl trainer's schema
# (full-parameter fine-tune of Llama-3.1-8B-Instruct under FSDP).
# NOTE(review): nesting below is reconstructed; the page scrape had flattened
# all indentation (e.g. `type: alpaca` and the fsdp_* keys sat at top level).
base_model: meta-llama/Llama-3.1-8B-Instruct
# Write checkpoints in safetensors format.
save_safetensors: true

# Instruction-tuning data in alpaca format, pulled from the HF Hub.
datasets:
  - path: teknium/GPT4-LLM-Cleaned
    type: alpaca
dataset_prepared_path: ./last_run_prepared
output_dir: ./outputs/fft-out

sequence_len: 8192
gradient_accumulation_steps: 1
micro_batch_size: 1
num_epochs: 1
optimizer: adamw_torch
# NOTE(review): YAML 1.1 parses `2e-5` as a *string* (no dot in mantissa);
# presumably the consumer coerces it to float — confirm, or write 2.0e-5.
learning_rate: 2e-5

# Mixed precision: bf16 auto-detected. fp16 is deliberately left empty
# (parses as null) — presumably "not requested" in the consumer's schema;
# do not change to `false` without confirming the two are equivalent.
bf16: auto
fp16:
tf32: false

logging_steps: 2
# xformers attention left unset (null); flash attention enabled instead.
xformers_attention:
flash_attention: true

warmup_steps: 2
evals_per_epoch: 2
save_steps: 2
# max_steps: 2 caps the run at two optimizer steps — a smoke-test setting,
# consistent with the "testing issue 1947" commit messages on this file.
max_steps: 2
weight_decay: 0.0

# FSDP: full parameter sharding with automatic module wrapping.
fsdp:
  - full_shard
  - auto_wrap
fsdp_config:
  fsdp_limit_all_gathers: true
  fsdp_sync_module_states: true
  fsdp_offload_params: false
  fsdp_use_orig_params: true
  fsdp_cpu_ram_efficient_loading: false
  # Wrap each LlamaDecoderLayer as its own FSDP unit.
  fsdp_auto_wrap_policy: TRANSFORMER_BASED_WRAP
  fsdp_transformer_layer_cls_to_wrap: LlamaDecoderLayer
  # Gather a full (unsharded) state dict when saving checkpoints.
  fsdp_state_dict_type: FULL_STATE_DICT
  fsdp_sharding_strategy: FULL_SHARD
  fsdp_backward_prefetch: BACKWARD_PRE

special_tokens:
  pad_token: "<|end_of_text|>"