From 37c27aedc130a7a40965ae5289ccc52e4c336c92 Mon Sep 17 00:00:00 2001
From: Wing Lian
Date: Sat, 3 May 2025 01:56:09 -0400
Subject: [PATCH] fsdp embeddings should be float32 per comment

---
 src/axolotl/utils/models.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/axolotl/utils/models.py b/src/axolotl/utils/models.py
index ba71ea459..8ba26543c 100644
--- a/src/axolotl/utils/models.py
+++ b/src/axolotl/utils/models.py
@@ -1309,7 +1309,7 @@ class ModelLoader:

         # make sure these are fp32 per Ramesh et al. (2021)
         embedding_modules = get_linear_embedding_layers(self.cfg.model_config_type)
-        if not self.cfg.fsdp:
+        if self.cfg.fsdp:
             # FSDP doesn't like mixed Float and BFloat16
             self.convert_embedding_modules_dtype(
                 embedding_modules,
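
Note: below is a minimal, hypothetical sketch of the dtype upcast that the flipped condition guards. The patch does not show the real signature of ModelLoader.convert_embedding_modules_dtype, so the helper's parameters and the toy module names (embed_tokens, lm_head) are illustrative assumptions, not axolotl's implementation. The point it demonstrates: when FSDP is enabled, the linear embedding layers are upcast to float32 so FSDP does not see a mix of float32 and bfloat16 parameters.

```python
# Hypothetical sketch (assumed names/signature; not axolotl's actual code):
# upcast embedding-like modules to float32 so FSDP sees a single dtype.
import torch
import torch.nn as nn


def convert_embedding_modules_dtype(
    model: nn.Module,
    embedding_modules: list,
    dist_dtype: torch.dtype = torch.float32,
) -> None:
    # Walk all submodules and upcast the ones acting as linear embedding
    # layers (e.g. token embeddings and the LM head) to the target dtype.
    for name, module in model.named_modules():
        if any(name.endswith(suffix) for suffix in embedding_modules):
            module.to(dist_dtype)


if __name__ == "__main__":
    # Toy stand-in for a causal LM loaded in bfloat16.
    model = nn.Sequential()
    model.add_module("embed_tokens", nn.Embedding(100, 16))
    model.add_module("lm_head", nn.Linear(16, 100))
    model.to(torch.bfloat16)

    # Mirrors the patched branch: only upcast when FSDP is enabled.
    fsdp_enabled = True  # stand-in for self.cfg.fsdp
    if fsdp_enabled:
        convert_embedding_modules_dtype(model, ["embed_tokens", "lm_head"])

    print(model.get_submodule("embed_tokens").weight.dtype)  # torch.float32
    print(model.get_submodule("lm_head").weight.dtype)       # torch.float32
```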