allow report_to for multiple providers (#1647)
This commit is contained in:
@@ -1212,11 +1212,14 @@ class HFCausalTrainerBuilder(TrainerBuilderBase):
         )
         training_arguments_kwargs["group_by_length"] = self.cfg.group_by_length
         training_arguments_kwargs["curriculum_sampling"] = self.cfg.curriculum_sampling
-        report_to = None
+        report_to = []
         if self.cfg.use_wandb:
-            report_to = "wandb"
+            report_to.append("wandb")
         if self.cfg.use_mlflow:
-            report_to = "mlflow"
+            report_to.append("mlflow")
+        if self.cfg.use_tensorboard:
+            report_to.append("tensorboard")
+
         training_arguments_kwargs["report_to"] = report_to
         training_arguments_kwargs["run_name"] = (
             self.cfg.wandb_name if self.cfg.use_wandb else None
@@ -212,7 +212,7 @@ class LoraConfig(BaseModel):
     lora_target_modules: Optional[List[str]] = None
     lora_target_linear: Optional[bool] = None
     lora_modules_to_save: Optional[List[str]] = None
-    lora_dropout: Optional[float] = None
+    lora_dropout: Optional[float] = 0.0
     peft_layers_to_transform: Optional[List[int]] = None
     peft: Optional[PeftConfig] = None
     peft_use_dora: Optional[bool] = None
@@ -609,6 +609,7 @@ class AxolotlInputConfig(
     early_stopping_patience: Optional[int] = None
     load_best_model_at_end: Optional[bool] = False
     save_only_model: Optional[bool] = False
+    use_tensorboard: Optional[bool] = None

     neftune_noise_alpha: Optional[float] = None

Reference in New Issue
Block a user