diff --git a/src/axolotl/monkeypatch/lora_kernels.py b/src/axolotl/monkeypatch/lora_kernels.py
index ef5174ba2..e845dc6ce 100644
--- a/src/axolotl/monkeypatch/lora_kernels.py
+++ b/src/axolotl/monkeypatch/lora_kernels.py
@@ -149,6 +149,11 @@ def get_attention_cls_from_config(cfg: DictDefault) -> Type[nn.Module]:
 
         return MistralAttention
 
+    if model_type == "gemma3_text":
+        from transformers.models.gemma3.modeling_gemma3 import Gemma3Attention
+
+        return Gemma3Attention
+
     try:
         # Dynamically import the module and attention class
         module_path = f"transformers.models.{model_type}.modeling_{model_type}"