From 230e0ac363b58f48ff32c7e790fbb17859e32f41 Mon Sep 17 00:00:00 2001 From: Faria Huq Date: Tue, 28 May 2024 11:25:08 -0400 Subject: [PATCH] Fix Lora config error for Llama3 (#1659) The current YAML config throws an error: ValueError: Please set lora_modules_to_save to [`embed_tokens`, `lm_head`] when using an adapter and changing the special tokens. I added the required changes to resolve it. --- examples/llama-3/lora-8b.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/examples/llama-3/lora-8b.yml b/examples/llama-3/lora-8b.yml index 6b0ebaed8..cd21effb9 100644 --- a/examples/llama-3/lora-8b.yml +++ b/examples/llama-3/lora-8b.yml @@ -24,6 +24,9 @@ lora_alpha: 16 lora_dropout: 0.05 lora_target_linear: true lora_fan_in_fan_out: +lora_modules_to_save: + - embed_tokens + - lm_head wandb_project: wandb_entity: