upgrade to flash-attn 2.7.0 (#2048)

This commit is contained in:
Wing Lian
2024-11-14 06:59:25 -05:00
committed by GitHub
parent 5e98cdddac
commit 2d7830fda6
4 changed files with 3 additions and 7 deletions

View File

@@ -35,7 +35,3 @@ RUN git lfs install --skip-repo && \
pip3 install awscli && \
# The base image ships with `pydantic==1.8.2` which is not working
pip3 install -U --no-cache-dir pydantic==1.10.10
RUN if [ "$PYTHON_VERSION" != "2.5.1" ] ; then \
pip3 install flash-attn==2.6.3; \
fi

View File

@@ -44,7 +44,7 @@
"outputs": [],
"source": [
"!pip install -e git+https://github.com/axolotl-ai-cloud/axolotl#egg=axolotl\n",
"!pip install flash-attn==\"2.5.0\"\n",
"!pip install flash-attn==\"2.7.0.post2\"\n",
"!pip install deepspeed==\"0.13.1\"\n",
"!pip install mlflow==\"2.13.0\""
]
},

View File

@@ -12,7 +12,7 @@ addict
fire
PyYAML>=6.0
requests
flash-attn==2.6.3
flash-attn==2.7.0.post2
sentencepiece
wandb
einops

View File

@@ -105,7 +105,7 @@ setup(
dependency_links=dependency_links,
extras_require={
"flash-attn": [
"flash-attn==2.6.3",
"flash-attn==2.7.0.post2",
],
"deepspeed": [
"deepspeed==0.14.4",