use default torch fused adamw optimizer as default as adamw_hf is deprecated (#2425)
* use default torch fused adamw optimizer as default as adamw_hf is deprecated * make sure to have latest packaging installed * bump packaging in requirements.txt too
This commit is contained in:
@@ -507,7 +507,7 @@ class HyperparametersConfig(BaseModel):
|
||||
weight_decay: Optional[float] = 0.0
|
||||
optimizer: Optional[
|
||||
Union[OptimizerNames, CustomSupportedOptimizers]
|
||||
] = OptimizerNames.ADAMW_HF
|
||||
] = OptimizerNames.ADAMW_TORCH_FUSED
|
||||
optim_args: Optional[Union[str, Dict[str, Any]]] = Field(
|
||||
default=None,
|
||||
json_schema_extra={"description": "Optional arguments to supply to optimizer."},
|
||||
|
||||
Reference in New Issue
Block a user