bump to latest transformers release

This commit is contained in:
Wing Lian
2025-01-13 10:34:44 -05:00
parent af727eedf7
commit 23389b38b7
3 changed files with 8 additions and 6 deletions

View File

@@ -13,7 +13,8 @@ liger-kernel==0.5.2
packaging==23.2
peft==0.14.0
transformers==4.47.1
# transformers==4.48.1
transformers @ git+https://github.com/huggingface/transformers.git@v4.48-release
tokenizers>=0.21.0
accelerate==1.2.1
datasets==3.2.0

View File

@@ -387,15 +387,13 @@ class ModelLoader:
self.patch_attention()
if self.cfg.model_config_type == "llama":
from axolotl.monkeypatch.trainer_grad_accum import (
from axolotl.monkeypatch.trainer_grad_accum import ( # patch_forward_for_ga,; patch_training_step_for_ga,
patch_flash_attention_forward,
patch_forward_for_ga,
patch_training_step_for_ga,
)
patch_flash_attention_forward()
patch_forward_for_ga()
patch_training_step_for_ga()
# patch_forward_for_ga()
# patch_training_step_for_ga()
if self.cfg.sample_packing and self.cfg.s2_attention:
raise ValueError(

View File

@@ -1,12 +1,15 @@
"""Test module for checking whether Hugging Face Transformers is working as expected."""
import unittest
import pytest
from axolotl.monkeypatch.trainer_grad_accum import (
check_forward_is_patchable,
check_training_step_is_patchable,
)
@pytest.mark.skip("may not be needed for latest transformers version")
class TestTrainerGAIntegration(unittest.TestCase):
"""llama monkeypatch integration tests."""