lora config

Author: bursteratom
Date:   2024-12-06 15:23:24 -05:00
Parent: 13ca7ed087
Commit: ea8269d2eb


@@ -26,7 +26,7 @@
 pad_to_sequence_len: false
 lora_r: 32
 lora_alpha: 16
 lora_dropout: 0.05
-# lora_target_linear: true
+lora_target_linear: true
 #lora_target_modules: 'language_model.model.layers.[\d]+.(mlp|cross_attn|self_attn).(up|down|gate|q|k|v|o)_proj'
 wandb_project:
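
For reference: in axolotl-style configs, setting lora_target_linear: true applies LoRA adapters to all linear layers of the model, which is why the narrower lora_target_modules regex can stay commented out. Below is a minimal sketch of the adapter block this hunk sits in; the lora_* keys and values come from the diff above, while the adapter key is an assumption about the rest of the file, not part of this commit:

    # Hedged sketch of a minimal LoRA adapter block (axolotl-style config).
    adapter: lora               # assumption: selects the LoRA adapter path
    lora_r: 32                  # rank of the low-rank update matrices
    lora_alpha: 16              # scaling factor; effective scale is lora_alpha / lora_r
    lora_dropout: 0.05          # dropout on the adapter path during training
    lora_target_linear: true    # adapt every linear layer; supersedes the regex below
    # lora_target_modules: '...'  # narrower per-module targeting, left disabled here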