This commit is contained in:
sunny
2024-10-30 11:04:50 -04:00
parent 271c2c2b82
commit 38773d661f
3 changed files with 14 additions and 8 deletions

View File

@@ -895,13 +895,13 @@ class AxolotlTrainer(SchedulerMixin, Trainer):
for key, value in metrics.items():
self._stored_metrics[train_eval][key].append(value)
-def _save_checkpoint(self, model, trial, metrics=None):
+def _save_checkpoint(self, model, trial):
# make sure the checkpoint dir exists, since trainer is flakey
checkpoint_folder = f"{PREFIX_CHECKPOINT_DIR}-{self.state.global_step}"
run_dir = self._get_output_dir(trial=trial)
output_dir = os.path.join(run_dir, checkpoint_folder)
os.makedirs(output_dir, exist_ok=True)
-return super()._save_checkpoint(model, trial, metrics=metrics)
+return super()._save_checkpoint(model, trial)
class AxolotlMambaTrainer(AxolotlTrainer):

View File

@@ -28,16 +28,17 @@ SUPPORTED_MULTIPACK_MODEL_TYPES = [
# def patch_for_multipack(model_type, model_name=None, is_remote_code=False):
-def patch_for_multipack(model_type, model_name=None):
+def patch_for_multipack(model_type, model_name=None, has_remote_code=False):
if model_type == "gemmoe":
patch_remote(model_name, ".configuration_gemmoe", ".modeling_gemmoe")
elif model_type == "deepseek_v2":
patch_remote(model_name, ".configuration_deepseek", ".modeling_deepseek")
# elif hasattr(transformers, "modeling_flash_attention_utils") and not is_remote_code:
elif hasattr(transformers, "modeling_flash_attention_utils"):
-transformers.modeling_flash_attention_utils._get_unpad_data = ( # pylint: disable=protected-access
-get_unpad_data
-)
+if not has_remote_code:
+transformers.modeling_flash_attention_utils._get_unpad_data = ( # pylint: disable=protected-access
+get_unpad_data
+)
if model_type == "mixtral" and is_deepspeed_zero3_enabled():
patch_mixtral_moe_forward_zero3()
return

View File

@@ -393,11 +393,16 @@ class ModelLoader:
self.cfg.model_config_type in SUPPORTED_MULTIPACK_MODEL_TYPES
and self.cfg.flash_attention
and self.cfg.sample_packing
-):
+):
+has_remote_code = (
+"auto_map" in self.model_config
+and self.model_type in self.model_config["auto_map"]
+)
 patch_for_multipack(
 self.cfg.model_config_type,
 model_name=self.cfg.base_model,
-# is_remote_code=self.cfg.trust_remote_code,
+has_remote_code=has_remote_code,
 )
if self.cfg.is_llama_derived_model: