From 1b54af8e544d1130bac17db0374c0e31bdf6621e Mon Sep 17 00:00:00 2001
From: bursteratom
Date: Fri, 6 Dec 2024 15:27:18 -0500
Subject: [PATCH] lora config

---
 examples/qwen2-vl/lora-7b.yaml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/examples/qwen2-vl/lora-7b.yaml b/examples/qwen2-vl/lora-7b.yaml
index 75c31fb3b..765b0caef 100644
--- a/examples/qwen2-vl/lora-7b.yaml
+++ b/examples/qwen2-vl/lora-7b.yaml
@@ -27,7 +27,7 @@
 lora_r: 32
 lora_alpha: 16
 lora_dropout: 0.05
 # lora_target_linear: true
-lora_target_modules: 'language_model.model.layers.[\d]+.(mlp|cross_attn|self_attn).(up|down|gate|q|k|v|o)_proj'
+lora_target_modules: 'torch.nn.linear'
 wandb_project:
 wandb_entity: