From 718a8f41531d383272040d7d33cf12eca89061c6 Mon Sep 17 00:00:00 2001
From: Wing Lian
Date: Wed, 21 Feb 2024 23:32:44 -0500
Subject: [PATCH] update flash attention to 2.5.5 for gemma

---
 requirements.txt | 2 +-
 setup.py         | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/requirements.txt b/requirements.txt
index a5986fa4f..f6c40a0b9 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -11,7 +11,7 @@ fire
 PyYAML>=6.0
 requests
 datasets>=2.15.0
-flash-attn==2.3.3
+flash-attn==2.5.5
 sentencepiece
 wandb
 einops
diff --git a/setup.py b/setup.py
index d4a39b76e..516ffd624 100644
--- a/setup.py
+++ b/setup.py
@@ -67,7 +67,7 @@ setup(
     dependency_links=dependency_links,
     extras_require={
         "flash-attn": [
-            "flash-attn==2.5.0",
+            "flash-attn==2.5.5",
         ],
         "fused-dense-lib": [
             "fused-dense-lib @ git+https://github.com/Dao-AILab/flash-attention@v2.3.3#subdirectory=csrc/fused_dense_lib",