fix: Enable KD plugin support for PEFT/LoRA adapters (#3207)
- Fix `_loss_function` attribute not found on base model with PEFT
- Fix mismatched attribute name (`loss_function` vs `_loss_function`)
- Set `_loss_function` on unwrapped base model for PEFT
- Enable previously skipped `test_llama_lora_kd` test
- Add test config fixes for LoRA kernel compatibility

Fixes https://github.com/axolotl-ai-cloud/axolotl/issues/3206
This commit is contained in:
@@ -104,7 +104,6 @@ class TestKnowledgeDistillation:
             temp_dir + "/runs", "train/loss", 1.4, "Train Loss (%s) is too high"
         )

-    @pytest.mark.skip(reason="Chunked KD loss doesn't support PEFT/LoRA")
     @pytest.mark.parametrize(
         "load_in_8bit",
         [True, False],
@@ -120,6 +119,10 @@ class TestKnowledgeDistillation:
                 "lora_r": 16,
                 "lora_alpha": 32,
                 "lora_dropout": 0.0,
+                "lora_modules_to_save": ["embed_tokens", "lm_head"],
+                "lora_mlp_kernel": False,
+                "lora_qkv_kernel": False,
+                "lora_o_kernel": False,
             }
             | kd_min_cfg
         )
Reference in New Issue
Block a user