Move tests around for flash_attn install

This commit is contained in:
Dan Saunders
2024-12-18 19:36:23 +00:00
parent 66176b3e07
commit 1d935f65c3
15 changed files with 52 additions and 52 deletions

View File

@@ -84,9 +84,9 @@ class LlamaDifferentialAttention(nn.Module):
if config.split_heads:
# Split heads mode
assert (
self.base_num_heads % 2 == 0
), "Number of heads must be even for splitting"
# assert (
# self.base_num_heads % 2 == 0
# ), "Number of heads must be even for splitting"
self.heads_per_component = self.base_num_heads // 2
# Single projections