fixup logging layer

This commit is contained in:
Wing Lian
2025-01-15 21:36:14 -05:00
parent 145664d82c
commit 79ae776102

View File

@@ -69,7 +69,7 @@ def convert_to_rala(
layer_type = type(child).__name__
logger.info(
-        f"Converting attention layer {layer_idx}: {layer_type} to {attention_class.__name__}"
+        f"Converting attention layer {decoder_layer_idx}: {layer_type} to {attention_class.__name__}"
)
# Create new diff attn layer