update hf deps (#1964)
* update hf deps
* remove deprecated set_caching_enabled
This commit is contained in:
@@ -1,11 +1,11 @@
|
||||
--extra-index-url https://huggingface.github.io/autogptq-index/whl/cu118/
|
||||
packaging==23.2
|
||||
peft==0.13.0
|
||||
transformers==4.45.1
|
||||
tokenizers>=0.19.1
|
||||
bitsandbytes==0.44.0
|
||||
accelerate==0.34.2
|
||||
datasets==2.21.0
|
||||
peft==0.13.2
|
||||
transformers==4.45.2
|
||||
tokenizers>=0.20.1
|
||||
bitsandbytes==0.44.1
|
||||
accelerate==1.0.0
|
||||
datasets==3.0.1
|
||||
deepspeed==0.14.4
|
||||
pydantic==2.6.3
|
||||
addict
|
||||
|
||||
@@ -11,7 +11,7 @@ import numpy as np
|
||||
import torch
|
||||
import torch.cuda
|
||||
from accelerate.logging import get_logger
|
||||
from datasets import set_caching_enabled
|
||||
from datasets import disable_caching, enable_caching
|
||||
from torch.utils.data import DataLoader, RandomSampler
|
||||
from transformers.utils import is_torch_bf16_gpu_available
|
||||
|
||||
@@ -87,10 +87,10 @@ def trainer_weighted_loss(model_output, labels, shift_labels=True):
|
||||
@contextmanager
|
||||
def disable_datasets_caching():
|
||||
try:
|
||||
set_caching_enabled(False)
|
||||
disable_caching()
|
||||
yield
|
||||
finally:
|
||||
set_caching_enabled(True)
|
||||
enable_caching()
|
||||
|
||||
|
||||
def add_position_ids(sample):
|
||||
|
||||
Reference in New Issue
Block a user