bump flash attention to 2.6.2 (#1781) [skip ci]

Author: Wing Lian
Date: 2024-07-23 19:54:15 -04:00
Committed by: GitHub
Parent: 608a2f3180
Commit: e6b299dd79
2 changed files with 3 additions and 3 deletions

requirements.txt

@@ -12,7 +12,7 @@ fire
 PyYAML>=6.0
 requests
 datasets==2.19.1
-flash-attn==2.6.1
+flash-attn==2.6.2
 sentencepiece
 wandb
 einops
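
The pin here and the setup.py extras below must reference the same flash-attention release. A quick runtime sanity check (a minimal sketch, assuming flash-attn imports cleanly in the target environment):

    import flash_attn

    # flash-attn exposes its release string at the package root;
    # it should match the pin in requirements.txt.
    assert flash_attn.__version__ == "2.6.2", flash_attn.__version__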

setup.py

@@ -80,10 +80,10 @@ setup(
     dependency_links=dependency_links,
     extras_require={
         "flash-attn": [
-            "flash-attn==2.6.1",
+            "flash-attn==2.6.2",
         ],
         "fused-dense-lib": [
-            "fused-dense-lib @ git+https://github.com/Dao-AILab/flash-attention@v2.6.1#subdirectory=csrc/fused_dense_lib",
+            "fused-dense-lib @ git+https://github.com/Dao-AILab/flash-attention@v2.6.2#subdirectory=csrc/fused_dense_lib",
         ],
         "deepspeed": [
             "deepspeed @ git+https://github.com/microsoft/DeepSpeed.git@bc48371c5e1fb8fd70fc79285e66201dbb65679b",