fix(model): apply gate fp32 only for mixtral (#1241)
* fix(model): apply gate fp32 only for mixtral
* Update src/axolotl/utils/models.py
* fix gate layer check

---------

Co-authored-by: Wing Lian <wing.lian@gmail.com>
@@ -676,7 +676,7 @@ def load_model(
     if not cfg.fsdp:
         # FSDP doesn't like mixed Float and BFloat16
         for name, module in model.named_modules():
-            if any(m in name for m in ["norm", "gate"]):
+            if "norm" in name or name.endswith(".gate"):
                 module.to(torch.float32)
             if model_config.model_type == "btlm":
                 # don't upcast lm_head for btlm
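The one-line change tightens which modules get upcast to float32 when FSDP is off. The old substring test matched any module whose name contains "gate", which also catches Llama-style gate_proj MLP projections; the new test only matches a literal trailing ".gate", i.e. the Mixtral MoE router. Below is a minimal sketch of the difference, using hypothetical module names modeled on the Hugging Face Llama and Mixtral layouts:

# Hypothetical module names, modeled on the HF Llama and Mixtral layouts.
llama_mlp_gate = "model.layers.0.mlp.gate_proj"          # ordinary MLP projection
mixtral_router = "model.layers.0.block_sparse_moe.gate"  # MoE routing layer

def old_check(name):
    # Before this commit: substring match, so "gate_proj" is also upcast.
    return any(m in name for m in ["norm", "gate"])

def new_check(name):
    # After this commit: only norm layers and a name ending in ".gate".
    return "norm" in name or name.endswith(".gate")

assert old_check(llama_mlp_gate) and old_check(mixtral_router)
assert not new_check(llama_mlp_gate) and new_check(mixtral_router)

With the new check, only the Mixtral router is kept in float32; non-MoE gate projections retain their fp16/bf16 dtype, which is what the commit title means by "only for mixtral".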