bump flash attention to 2.6.2 (#1781) [skip ci]
requirements.txt
@@ -12,7 +12,7 @@ fire
 PyYAML>=6.0
 requests
 datasets==2.19.1
-flash-attn==2.6.1
+flash-attn==2.6.2
 sentencepiece
 wandb
 einops
setup.py
@@ -80,10 +80,10 @@ setup(
     dependency_links=dependency_links,
     extras_require={
         "flash-attn": [
-            "flash-attn==2.6.1",
+            "flash-attn==2.6.2",
         ],
         "fused-dense-lib": [
-            "fused-dense-lib @ git+https://github.com/Dao-AILab/flash-attention@v2.6.1#subdirectory=csrc/fused_dense_lib",
+            "fused-dense-lib @ git+https://github.com/Dao-AILab/flash-attention@v2.6.2#subdirectory=csrc/fused_dense_lib",
         ],
         "deepspeed": [
             "deepspeed @ git+https://github.com/microsoft/DeepSpeed.git@bc48371c5e1fb8fd70fc79285e66201dbb65679b",
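A quick way to verify the bump took effect after reinstalling (a minimal sketch, not part of this commit; it assumes the package is installed under its PyPI distribution name "flash-attn" and uses only the Python standard library):

    # Read the installed distribution's version and check it matches the new pin.
    from importlib.metadata import version

    installed = version("flash-attn")  # raises PackageNotFoundError if absent
    assert installed == "2.6.2", f"expected flash-attn 2.6.2, found {installed}"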