Compare commits

...

1 Commit

Author SHA1 Message Date
Wing Lian
718a8f4153 update flash attention to 2.5.5 for gemma 2024-02-21 23:32:44 -05:00
2 changed files with 2 additions and 2 deletions

View File

@@ -11,7 +11,7 @@ fire
PyYAML>=6.0
requests
datasets>=2.15.0
flash-attn==2.3.3
flash-attn==2.5.5
sentencepiece
wandb
einops

View File

@@ -67,7 +67,7 @@ setup(
dependency_links=dependency_links,
extras_require={
"flash-attn": [
"flash-attn==2.5.0",
"flash-attn==2.5.5",
],
"fused-dense-lib": [
"fused-dense-lib @ git+https://github.com/Dao-AILab/flash-attention@v2.3.3#subdirectory=csrc/fused_dense_lib",