diff --git a/examples/yayi2-30b/fft.yml b/examples/yayi2-30b/fft.yml
new file mode 100644
index 000000000..a44abc22d
--- /dev/null
+++ b/examples/yayi2-30b/fft.yml
@@ -0,0 +1,64 @@
+base_model: models/yayi2-30b
+model_type: AutoModelForCausalLM
+tokenizer_type: AutoTokenizer
+is_mistral_derived_model: false
+trust_remote_code: true
+model_revision: refs/pr/5  # NOTE(review): pinned to an open PR ref — fragile; replace with a commit SHA or tag once the PR is merged
+
+load_in_8bit: false
+load_in_4bit: false
+strict: false
+
+datasets:
+ - path: mhenrichsen/alpaca_2k_test
+ type: alpaca
+dataset_prepared_path:
+val_set_size: 0.05
+output_dir: ./out
+
+sequence_len: 2048
+sample_packing: false
+pad_to_sequence_len: false
+eval_sample_packing: false
+
+wandb_project:
+wandb_entity:
+wandb_watch:
+wandb_name:
+wandb_log_model:
+
+gradient_accumulation_steps: 1
+micro_batch_size: 1
+num_epochs: 1
+optimizer: adamw_bnb_8bit
+lr_scheduler: cosine
+learning_rate: 0.000005
+
+train_on_inputs: false
+group_by_length: false
+bf16: true
+fp16: false
+tf32: false
+
+gradient_checkpointing: true
+early_stopping_patience:
+resume_from_checkpoint:
+local_rank:
+logging_steps: 1
+xformers_attention:
+flash_attention: true
+
+warmup_steps: 10
+evals_per_epoch: 4
+eval_table_size:
+eval_table_max_new_tokens: 128
+saves_per_epoch: 1
+debug:
+deepspeed: deepspeed/zero3.json
+weight_decay: 0.0
+fsdp:
+fsdp_config:
+special_tokens:
+  bos_token: ""  # NOTE(review): empty-string special tokens override the tokenizer's defaults — confirm this is intended for yayi2
+ eos_token: ""
+ unk_token: ""