feat: add apertus model and cce (#3144) [skip ci]
* feat: add apertus, glm4v, glm4v_moe cce * fix: arcee docs * feat: add apertus * feat: added vram usage * fix: add apertus note * feat: update doc on apertus xielu * fix: add monkeypatch for xielu activation issue * fix: simplify env * feat: pin commit * feat: add packing * chore: move patch calling * Update examples/apertus/README.md Co-authored-by: salman <salman.mohammadi@outlook.com> * Update examples/apertus/README.md Co-authored-by: salman <salman.mohammadi@outlook.com> * Update examples/apertus/README.md Co-authored-by: salman <salman.mohammadi@outlook.com> --------- Co-authored-by: salman <salman.mohammadi@outlook.com>
This commit is contained in:
@@ -19,7 +19,7 @@ python scripts/cutcrossentropy_install.py | sh
|
||||
|
||||
- If you are installing from pip

```bash
pip3 uninstall -y cut-cross-entropy && pip3 install "cut-cross-entropy[transformers] @ git+https://github.com/axolotl-ai-cloud/ml-cross-entropy.git@c564afc"
```

## Usage
|
||||
|
||||
@@ -35,7 +35,7 @@ LOG = get_logger(__name__)
|
||||
|
||||
_CCE_INSTALL_MESSAGE = (
|
||||
"Please install Axolotl's fork of cut_cross_entropy with transformers support using "
|
||||
'`pip install "cut-cross-entropy[transformers] @ git+https://github.com/axolotl-ai-cloud/ml-cross-entropy.git@c6a32c5"`'
|
||||
'`pip install "cut-cross-entropy[transformers] @ git+https://github.com/axolotl-ai-cloud/ml-cross-entropy.git@c564afc"`'
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -68,11 +68,12 @@ class PatchManager:
|
||||
self._apply_self_attention_lora_patch()
|
||||
self._apply_fsdp2_bnb_patches()
|
||||
self._apply_patch_deepspeed_zero3()
|
||||
self._apply_voxtral_patches()
|
||||
self._apply_apertus_patches()
|
||||
|
||||
def apply_post_plugin_pre_model_load_patches(self):
    """Apply the patches that must run after plugins load but before the model loads.

    Dispatches the tiled-MLP patch for the configured model type, then the
    Voxtral patches.
    """
    config_type = self.cfg.model_config_type
    self._apply_tiled_mlp(config_type)
    self._apply_voxtral_patches()
|
||||
|
||||
def _apply_transformers_patches(self):
|
||||
from axolotl.monkeypatch.transformers.trainer_loss_calc import (
|
||||
@@ -493,3 +494,12 @@ class PatchManager:
|
||||
apply_deepspeed_patches()
|
||||
except ImportError as e:
|
||||
LOG.warning(f"DeepSpeed patches not applied: {e}")
|
||||
|
||||
def _apply_apertus_patches(self):
|
||||
"""Apply patches for Apertus model."""
|
||||
if self.cfg.model_config_type == "apertus":
|
||||
from axolotl.monkeypatch.models.apertus.activation import (
|
||||
patch_apertus_xielu_activation,
|
||||
)
|
||||
|
||||
patch_apertus_xielu_activation()
|
||||
|
||||
 0  src/axolotl/monkeypatch/models/apertus/__init__.py   (new file)
52  src/axolotl/monkeypatch/models/apertus/activation.py (new file)
@@ -0,0 +1,52 @@
|
||||
"""Monkeypatch for Apertus to dtype mismatch in XIELU act"""

from torch import Tensor


def patch_apertus_xielu_activation():
    """Replace ``XIELUActivation._xielu_cuda`` with a shape/dtype-safe wrapper.

    The wrapper reshapes the input to the 3-D layout the xIELU CUDA kernel
    expects and casts the learned alpha parameters to the input's dtype
    before invoking the kernel, then restores the original shape.

    Returns:
        A zero-argument ``unpatch`` callable that restores the original
        ``_xielu_cuda`` method.

    Raises:
        ImportError: if ``XIELUActivation`` cannot be imported (requires
            transformers >= 4.56.1).
    """
    try:
        from transformers.activations import XIELUActivation
    except ImportError as err:
        raise ImportError(
            "Cannot import XIELUActivation. "
            "Please make sure to update your transformers version >= 4.56.1."
        ) from err

    from transformers.activations import logger

    # Keep a reference to the unpatched method so it can be restored later.
    original_xielu_cuda = XIELUActivation._xielu_cuda

    def _xielu_cuda_fixed(self, x: Tensor) -> Tensor:
        """Firewall function to prevent torch.compile from seeing .item() calls"""
        input_shape = x.shape
        # CUDA kernel expects 3D tensors, reshape if needed
        while x.dim() < 3:
            x = x.unsqueeze(0)
        if x.dim() > 3:
            x = x.view(-1, 1, x.size(-1))
        if input_shape != x.shape:
            logger.warning_once(
                "Warning: xIELU input tensor expects 3 dimensions but got (shape: %s). Reshaping to (shape: %s).",
                input_shape,
                x.shape,
            )
        out = self._xielu_cuda_obj.forward(
            x,
            # Cast learned parameters to match the activation input's dtype.
            self.alpha_p.to(x.dtype),
            self.alpha_n.to(x.dtype),
            # Temporary until xIELU CUDA fully implemented -> self.{beta,eps}.item()
            self._beta_scalar,
            self._eps_scalar,
            self.with_vector_loads,
        )
        return out.view(input_shape)

    # Apply the patch
    XIELUActivation._xielu_cuda = _xielu_cuda_fixed

    def unpatch():
        """Restore the original method"""
        XIELUActivation._xielu_cuda = original_xielu_cuda

    return unpatch
|
||||
@@ -11,6 +11,7 @@ from axolotl.monkeypatch.mixtral import patch_mixtral_moe_forward_zero3
|
||||
from axolotl.monkeypatch.utils import get_unpad_data
|
||||
|
||||
SUPPORTED_MULTIPACK_MODEL_TYPES = [
|
||||
"apertus",
|
||||
"mllama_text_model",
|
||||
"llama",
|
||||
"llama4",
|
||||
|
||||
Reference in New Issue
Block a user