From e6b299dd79f75537f3e247a3d22dfed5d3885bfb Mon Sep 17 00:00:00 2001
From: Wing Lian
Date: Tue, 23 Jul 2024 19:54:15 -0400
Subject: [PATCH] bump flash attention to 2.6.2 (#1781) [skip ci]

---
 requirements.txt | 2 +-
 setup.py         | 4 ++--
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/requirements.txt b/requirements.txt
index a54a42ad9..ec571570b 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -12,7 +12,7 @@ fire
 PyYAML>=6.0
 requests
 datasets==2.19.1
-flash-attn==2.6.1
+flash-attn==2.6.2
 sentencepiece
 wandb
 einops
diff --git a/setup.py b/setup.py
index 9e6f34ad8..ceba63669 100644
--- a/setup.py
+++ b/setup.py
@@ -80,10 +80,10 @@ setup(
     dependency_links=dependency_links,
     extras_require={
         "flash-attn": [
-            "flash-attn==2.6.1",
+            "flash-attn==2.6.2",
         ],
         "fused-dense-lib": [
-            "fused-dense-lib @ git+https://github.com/Dao-AILab/flash-attention@v2.6.1#subdirectory=csrc/fused_dense_lib",
+            "fused-dense-lib @ git+https://github.com/Dao-AILab/flash-attention@v2.6.2#subdirectory=csrc/fused_dense_lib",
         ],
         "deepspeed": [
             "deepspeed @ git+https://github.com/microsoft/DeepSpeed.git@bc48371c5e1fb8fd70fc79285e66201dbb65679b",