@@ -224,6 +224,8 @@ def _clean_optim_lr_pgs(trainer: Trainer) -> List:
                 lrs_cfg.scheduler.last_epoch = -1  # type: ignore[union-attr]
                 if not isinstance(lrs_cfg.scheduler, ReduceLROnPlateau):
                     lrs_cfg.scheduler.base_lrs = []
+                # if hasattr(lrs_cfg.scheduler, "lr_lambdas"):
+                #     lrs_cfg.scheduler.lr_lambdas = []
         return orig_num_pgs
 
     def _reconfigure_optimizer_for_phase0(self, trainer: Trainer) -> None:
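
For context, the per-param-group state this hunk clears can be reproduced with a plain PyTorch scheduler. The sketch below is illustrative only (a stand-in `SGD`/`StepLR` pair, not FTS internals): it shows why resetting `last_epoch` to `-1` and emptying `base_lrs` leaves the scheduler ready to be re-primed against a reconfigured optimizer, which is what the `_initial_step()` call in the next hunk then does.

```python
# Illustrative sketch only (stand-in optimizer/scheduler, not FTS code).
import torch
from torch.optim import SGD
from torch.optim.lr_scheduler import StepLR

param = torch.nn.Parameter(torch.zeros(1))
opt = SGD([param], lr=0.1)
sched = StepLR(opt, step_size=10)

# ``base_lrs`` holds exactly one entry per ``optimizer.param_groups`` entry
assert sched.base_lrs == [0.1]

# reset as in ``_clean_optim_lr_pgs``: forget scheduler history and the
# (potentially stale) per-group base lrs
sched.last_epoch = -1
sched.base_lrs = []

# ...the optimizer's param groups would be reconfigured for phase 0 here...

# rebuild per-group state and replay step 0, analogous to the
# ``_initial_step()`` call in ``_reconfigure_lrs_for_phase0``
sched.base_lrs = [g["initial_lr"] for g in opt.param_groups]
sched._initial_step()
assert sched._last_lr == [g["lr"] for g in opt.param_groups]
```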
@@ -250,21 +252,23 @@ def _reconfigure_lrs_for_phase0(self, trainer: Trainer, orig_num_pgs: List) -> None:
         Args:
             trainer (Trainer): The :external+pl:class:`~lightning.pytorch.trainer.trainer.Trainer` object.
             orig_num_pgs (List): A list of the number of parameter groups pruned for each optimizer (since only a single
-                optimizer is currently supported by FTS, this list will have only a single element in this verison.)
+                optimizer is currently supported by FTS, this list will have only a single element in this version.)
         """
         # since we may have added parameter groups (e.g. implementing ``no_decay`` for user), we need to reinitialize
         # certain lr_scheduler variables (including type-dependent ones like ``min_lrs`` and ``lr_lambdas``)
         if trainer.lr_scheduler_configs:
             for lrs_cfg in trainer.lr_scheduler_configs:
+                # if hasattr(lrs_cfg.scheduler, "lr_lambdas"):
+                #     lrs_cfg.scheduler.lr_lambdas = lrs_cfg.scheduler.lr_lambdas[orig_num_pgs[0] :]
                 if not isinstance(lrs_cfg.scheduler, ReduceLROnPlateau):
                     lrs_cfg.scheduler._initial_step()
                     lrs_cfg.scheduler._last_lr = [  # type: ignore[union-attr]
                         group["lr"] for group in lrs_cfg.scheduler.optimizer.param_groups
                     ]
                 if isinstance(lrs_cfg.scheduler, ReduceLROnPlateau):
                     lrs_cfg.scheduler.min_lrs = lrs_cfg.scheduler.min_lrs[orig_num_pgs[0] :]
-                elif hasattr(lrs_cfg.scheduler, "lr_lambdas"):
-                    lrs_cfg.scheduler.lr_lambdas = lrs_cfg.scheduler.lr_lambdas[orig_num_pgs[0] :]
+                # elif hasattr(lrs_cfg.scheduler, "lr_lambdas"):
+                #     lrs_cfg.scheduler.lr_lambdas = lrs_cfg.scheduler.lr_lambdas[orig_num_pgs[0] :]
 
     def phase0_optimizer_override(self) -> None:
         """Reconfigure the user-configured optimizer (configured via `configure_optimizers`) to optimize the
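
The type-dependent lists realigned above (`min_lrs` for `ReduceLROnPlateau`, `lr_lambdas` for `LambdaLR`-style schedulers) are positional with respect to `optimizer.param_groups`, so pruning groups without slicing them would silently pair the surviving groups with the wrong entries. A standalone sketch of that invariant, assuming a hypothetical two-group optimizer and mirroring the `[orig_num_pgs[0] :]` slicing from the diff:

```python
# Standalone sketch (hypothetical params/groups, not FTS code).
import torch
from torch.optim import SGD
from torch.optim.lr_scheduler import LambdaLR, ReduceLROnPlateau

p0, p1 = (torch.nn.Parameter(torch.zeros(1)) for _ in range(2))
opt = SGD([{"params": [p0], "lr": 0.1}, {"params": [p1], "lr": 0.01}], lr=0.1)

# both scheduler types keep exactly one entry per param group
plateau = ReduceLROnPlateau(opt, min_lr=[1e-6, 1e-7])
lambda_sched = LambdaLR(opt, lr_lambda=[lambda e: 1.0, lambda e: 0.5])
assert len(plateau.min_lrs) == len(lambda_sched.lr_lambdas) == len(opt.param_groups)

# drop the first ``pruned`` group(s) (the role ``orig_num_pgs[0]`` plays in
# the diff); the per-group lists must be sliced identically, otherwise the
# remaining group(s) would inherit the wrong ``min_lr``/``lr_lambda``
pruned = 1
opt.param_groups = opt.param_groups[pruned:]
plateau.min_lrs = plateau.min_lrs[pruned:]
lambda_sched.lr_lambdas = lambda_sched.lr_lambdas[pruned:]
assert len(plateau.min_lrs) == len(lambda_sched.lr_lambdas) == len(opt.param_groups)
```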