upgrade flash-attn to 2.8.3 for gpt-oss attn sink support (#3082)

This commit is contained in:
Wing Lian
2025-08-21 15:04:10 -04:00
committed by GitHub
parent 08e517ea48
commit 0fa752e58b
6 changed files with 7 additions and 7 deletions

View File

@@ -118,9 +118,9 @@ def get_package_version():
extras_require = {
-    "flash-attn": ["flash-attn==2.8.2"],
+    "flash-attn": ["flash-attn==2.8.3"],
"ring-flash-attn": [
-        "flash-attn==2.8.2",
+        "flash-attn==2.8.3",
"ring-flash-attn>=0.1.7",
"yunchang==0.6.0",
],