diff --git a/examples/qwen2-vl/lora-7b.yaml b/examples/qwen2-vl/lora-7b.yaml
index a9d737a2e..cdc09ce78 100644
--- a/examples/qwen2-vl/lora-7b.yaml
+++ b/examples/qwen2-vl/lora-7b.yaml
@@ -26,7 +26,7 @@ pad_to_sequence_len: false
 lora_r: 32
 lora_alpha: 16
 lora_dropout: 0.05
-# lora_target_linear: true
+lora_target_linear: true
 #lora_target_modules: 'language_model.model.layers.[\d]+.(mlp|cross_attn|self_attn).(up|down|gate|q|k|v|o)_proj'
 wandb_project: