moving tests around for flash_attn install

commit 544f2a8a27
parent d4e29e5b67
Author: Dan Saunders
Date: 2024-12-18 19:36:23 +00:00

15 changed files with 52 additions and 52 deletions


@@ -84,9 +84,9 @@ class LlamaDifferentialAttention(nn.Module):
         if config.split_heads:
             # Split heads mode
-            assert (
-                self.base_num_heads % 2 == 0
-            ), "Number of heads must be even for splitting"
+            # assert (
+            #     self.base_num_heads % 2 == 0
+            # ), "Number of heads must be even for splitting"
             self.heads_per_component = self.base_num_heads // 2
             # Single projections
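
For context on the hunk above: in split-heads mode, differential attention divides the layer's existing heads into two equal groups, computes a softmax attention map for each, and subtracts the second map from the first; the even-head requirement that the now commented-out assertion enforced comes from that split. The sketch below illustrates the arithmetic under the standard Differential Transformer formulation. Only base_num_heads and heads_per_component mirror names in the diff; the function name, lambda_, the tensor shapes, and the use of a single value half are illustrative assumptions, not this repository's implementation.

import torch
import torch.nn.functional as F

def split_heads_diff_attention(q, k, v, lambda_=0.5):
    # q, k, v: (batch, num_heads, seq_len, head_dim), num_heads even.
    base_num_heads = q.shape[1]
    assert base_num_heads % 2 == 0, "Number of heads must be even for splitting"
    heads_per_component = base_num_heads // 2  # as in the diff

    # Split the existing heads into the two softmax components.
    q1, q2 = q.split(heads_per_component, dim=1)
    k1, k2 = k.split(heads_per_component, dim=1)
    v1, _ = v.split(heads_per_component, dim=1)  # simplification: one value half

    scale = q.shape[-1] ** -0.5
    attn1 = F.softmax(q1 @ k1.transpose(-2, -1) * scale, dim=-1)
    attn2 = F.softmax(q2 @ k2.transpose(-2, -1) * scale, dim=-1)

    # Differential attention: subtracting the second map is meant to
    # cancel attention noise common to both components.
    return (attn1 - lambda_ * attn2) @ v1

Calling this with q = k = v = torch.randn(2, 8, 16, 64) yields a (2, 4, 16, 64) output: eight base heads become four differential heads, matching heads_per_component = base_num_heads // 2.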