Compare commits

1 Commit

Author: Wing Lian
SHA1: 718a8f4153
Message: update flash attention to 2.5.5 for gemma
Date: 2024-02-21 23:32:44 -05:00
2 changed files with 2 additions and 2 deletions

requirements.txt

@@ -11,7 +11,7 @@ fire
 PyYAML>=6.0
 requests
 datasets>=2.15.0
-flash-attn==2.3.3
+flash-attn==2.5.5
 sentencepiece
 wandb
 einops

setup.py

@@ -67,7 +67,7 @@ setup(
     dependency_links=dependency_links,
     extras_require={
         "flash-attn": [
-            "flash-attn==2.5.0",
+            "flash-attn==2.5.5",
         ],
         "fused-dense-lib": [
             "fused-dense-lib @ git+https://github.com/Dao-AILab/flash-attention@v2.3.3#subdirectory=csrc/fused_dense_lib",