Full-weights FSDP training seems broken with fsdp_cpu_ram_efficient_loading; disabling it for now (#1726)

This commit is contained in:
Wing Lian
2024-07-05 09:15:36 -04:00
committed by GitHub
parent c6d83a87c4
commit c69b7eb2b5

View File

@@ -569,9 +569,11 @@ def load_model(
try:
skip_move_to_device = False
if (
cfg.fsdp and cfg.fsdp_config.fsdp_cpu_ram_efficient_loading
) and not qlora_fsdp:
if ( # pylint: disable=condition-evals-to-constant)
(cfg.fsdp and cfg.fsdp_config.fsdp_cpu_ram_efficient_loading)
and not qlora_fsdp
and False
):
model = load_sharded_model(
base_model,
model_config,