diff --git a/src/axolotl/monkeypatch/lora_kernels.py b/src/axolotl/monkeypatch/lora_kernels.py
index b420a965c..be1e1f2ff 100644
--- a/src/axolotl/monkeypatch/lora_kernels.py
+++ b/src/axolotl/monkeypatch/lora_kernels.py
@@ -156,6 +156,11 @@ def get_attention_cls_from_config(cfg: DictDefault) -> Type[nn.Module]:
         return Llama4TextAttention
 
+    if model_type == "mistral3":
+        from transformers.models.mistral.modeling_mistral import MistralAttention
+
+        return MistralAttention
+
     try:
         # Dynamically import the module and attention class
         module_path = f"transformers.models.{model_type}.modeling_{model_type}"