From 224da88fa21b6bf6b19a805fb2677e0df0017d19 Mon Sep 17 00:00:00 2001
From: NanoCode012
Date: Tue, 13 May 2025 03:23:53 +0700
Subject: [PATCH] fix: disable auto lora kernel if dropout nonzero (#2655)
 [skip ci]

* fix: disable auto lora kernel if dropout nonzero

* Add comment from PR feedback

---------

Co-authored-by: Wing Lian
---
 src/axolotl/utils/schemas/config.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/src/axolotl/utils/schemas/config.py b/src/axolotl/utils/schemas/config.py
index 9db374409..cd9891e04 100644
--- a/src/axolotl/utils/schemas/config.py
+++ b/src/axolotl/utils/schemas/config.py
@@ -1345,6 +1345,10 @@ class AxolotlConfigWCapabilities(AxolotlInputConfig):
         ):
             return data
 
+        # Skip if dropout is not 0, as auto enabling it would just disable it during runtime patch checks
+        if data.get("lora_dropout") != 0:
+            return data
+
         # Check multi-GPU compatibility
         capabilities = data.get("capabilities")
         is_multi_gpu = capabilities and capabilities.get("n_gpu", 0) > 1