add back dynamic=False
This commit is contained in:
committed by
Sung Ching Liu
parent
bdaaba2784
commit
75c565d476
@@ -40,6 +40,7 @@ def patch_flex_wrapper():
if not self._is_flex_compiled:
    self._compiled_flex_attention = torch.compile(
        flex_attention,
        dynamic=False,
        mode="max-autotune-no-cudagraphs",
        fullgraph=True,
    )
Reference in New Issue
Block a user