Fix typo bloat16 -> bfloat16 (#1257)

Author: Chirag Jain
Date: 2024-02-06 11:08:14 +05:30
Committed by: GitHub
Parent: c7cf3810bd
Commit: 1072f28874


@@ -322,7 +322,7 @@ def validate_config(cfg):
LOG.warning("BetterTransformers probably doesn't work with PEFT adapters")
if cfg.fp16 or cfg.bf16:
raise ValueError("AMP is not supported with BetterTransformer")
if cfg.float16 is not True and cfg.bloat16 is not True:
if cfg.float16 is not True and cfg.bfloat16 is not True:
LOG.warning(
"You should probably set bfloat16 or float16 to true to "
"load the model in float16 for BetterTransformers"