bump versions of deps (#1621)
* bump versions of deps
* bump transformers too
* fix xformers deps and include s3fs install
@@ -11,7 +11,7 @@ ARG PYTORCH_VERSION="2.1.2"
 
 ENV PYTORCH_VERSION=$PYTORCH_VERSION
 
 RUN apt-get update && \
-    apt-get install -y --allow-change-held-packages vim curl nano libnccl2 libnccl-dev
+    apt-get install -y --allow-change-held-packages vim curl nano libnccl2 libnccl-dev s3fs
 
 WORKDIR /workspace
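The added `s3fs` package is s3fs-fuse, which lets the container mount an S3 bucket as a local filesystem. A typical invocation (bucket name and mount point here are hypothetical, not from this commit) is `s3fs my-bucket /mnt/data -o passwd_file=${HOME}/.passwd-s3fs`.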
@@ -1,22 +1,22 @@
 --extra-index-url https://huggingface.github.io/autogptq-index/whl/cu118/
 packaging==23.2
 peft==0.10.0
-transformers @ git+https://github.com/huggingface/transformers.git@43d17c18360ac9c3d3491389328e2fe55fe8f9ce
-tokenizers==0.15.0
-bitsandbytes==0.43.0
-accelerate==0.28.0
-deepspeed==0.13.1
+transformers==4.40.2
+tokenizers==0.19.1
+bitsandbytes==0.43.1
+accelerate==0.30.1
+deepspeed==0.14.2
 pydantic==2.6.3
 addict
 fire
 PyYAML>=6.0
 requests
-datasets==2.15.0
-flash-attn==2.5.5
+datasets==2.19.1
+flash-attn==2.5.8
 sentencepiece
 wandb
 einops
-xformers==0.0.22
+xformers==0.0.23.post1
 optimum==1.16.2
 hf_transfer
 colorama
setup.py (17 lines changed)
@@ -30,7 +30,7 @@ def parse_requirements():
     try:
         if "Darwin" in platform.system():
-            _install_requires.pop(_install_requires.index("xformers==0.0.22"))
+            _install_requires.pop(_install_requires.index("xformers==0.0.23.post1"))
         else:
             torch_version = version("torch")
             _install_requires.append(f"torch=={torch_version}")
@@ -45,9 +45,12 @@ def parse_requirements():
         else:
             raise ValueError("Invalid version format")
 
-        if (major, minor) >= (2, 1):
-            _install_requires.pop(_install_requires.index("xformers==0.0.22"))
-            _install_requires.append("xformers>=0.0.23")
+        if (major, minor) >= (2, 3):
+            _install_requires.pop(_install_requires.index("xformers==0.0.23.post1"))
+            _install_requires.append("xformers>=0.0.26.post1")
+        elif (major, minor) >= (2, 2):
+            _install_requires.pop(_install_requires.index("xformers==0.0.23.post1"))
+            _install_requires.append("xformers>=0.0.25.post1")
     except PackageNotFoundError:
         pass
 
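For readers tracing the gate above, here is a minimal, self-contained sketch of how the switch behaves after this commit: the installed torch version decides which xformers requirement ends up in install_requires. The standalone function name xformers_requirement, the simplified version parsing, and the torch-not-installed fallback are illustrative assumptions; only the (major, minor) thresholds and the pins come from the diff.

from importlib.metadata import PackageNotFoundError, version


def xformers_requirement() -> str:
    """Pick the xformers requirement matching the installed torch, per the hunk above."""
    try:
        torch_version = version("torch")  # e.g. "2.3.0+cu121"
    except PackageNotFoundError:
        # torch not installed yet: keep the default pin from requirements.txt
        return "xformers==0.0.23.post1"
    major, minor = (int(part) for part in torch_version.split(".")[:2])
    if (major, minor) >= (2, 3):
        return "xformers>=0.0.26.post1"
    if (major, minor) >= (2, 2):
        return "xformers>=0.0.25.post1"
    # torch 2.1.x (the base image default) stays on the pinned release
    return "xformers==0.0.23.post1"


print(xformers_requirement())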
@@ -68,13 +71,13 @@ setup(
     dependency_links=dependency_links,
     extras_require={
         "flash-attn": [
-            "flash-attn==2.5.5",
+            "flash-attn==2.5.8",
         ],
         "fused-dense-lib": [
-            "fused-dense-lib @ git+https://github.com/Dao-AILab/flash-attention@v2.3.3#subdirectory=csrc/fused_dense_lib",
+            "fused-dense-lib @ git+https://github.com/Dao-AILab/flash-attention@v2.5.8#subdirectory=csrc/fused_dense_lib",
         ],
         "deepspeed": [
-            "deepspeed==0.13.1",
+            "deepspeed==0.14.2",
             "deepspeed-kernels",
         ],
         "mamba-ssm": [
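Usage is unchanged by the bump: the extras shown here are opted into at install time, e.g. `pip install 'axolotl[flash-attn,deepspeed]'` (the `axolotl` distribution name is assumed from the surrounding project and does not appear in this diff).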