Definition at line 472 of file hooks.py.
◆ __init__()
fastreid.engine.hooks.SWA.__init__(self,
                                   swa_start: int,
                                   swa_freq: int,
                                   swa_lr_factor: float,
                                   eta_min: float,
                                   lr_sched = False)
Definition at line 473 of file hooks.py.
473 def __init__(self, swa_start: int, swa_freq: int, swa_lr_factor: float, eta_min: float, lr_sched=False, ):
474 self.swa_start = swa_start
475 self.swa_freq = swa_freq
476 self.swa_lr_factor = swa_lr_factor
477 self.eta_min = eta_min
478 self.lr_sched = lr_sched
479
◆ after_step()
fastreid.engine.hooks.SWA.after_step(self)
Called after each iteration.
Reimplemented from fastreid.engine.train_loop.HookBase.
Definition at line 494 of file hooks.py.
def after_step(self):
    """Called after each iteration.

    Steps the SWA learning-rate schedule once SWA has begun, and on the
    final training iteration swaps the averaged SWA weights into the model.
    """
    upcoming = self.trainer.iter + 1

    # Drive the cosine schedule only after the SWA phase has started
    # (and only when LR scheduling was requested).
    if self.lr_sched and upcoming > self.swa_start:
        self.scheduler.step()

    # On the very last iteration, replace the parameters with the SWA average.
    if upcoming == self.trainer.max_iter:
        self.trainer.optimizer.swap_swa_param()
◆ before_step()
fastreid.engine.hooks.SWA.before_step(self)
Called before each iteration.
Reimplemented from fastreid.engine.train_loop.HookBase.
Definition at line 480 of file hooks.py.
def before_step(self):
    """Called before each iteration.

    On the exact iteration ``swa_start``, wraps the trainer's optimizer in
    ``optim.SWA`` so it accumulates a running weight average, and optionally
    attaches a cosine-annealing-with-warm-restarts LR schedule.
    """
    # Act only on the single iteration where SWA begins.
    if self.trainer.iter != self.swa_start:
        return

    # Wrap the current optimizer; SWA keeps an average of the weights.
    self.trainer.optimizer = optim.SWA(self.trainer.optimizer, self.swa_freq, self.swa_lr_factor)
    self.trainer.optimizer.reset_lr_to_swa()

    if self.lr_sched:
        # Restart the cosine cycle every swa_freq iterations during SWA.
        self.scheduler = torch.optim.lr_scheduler.CosineAnnealingWarmRestarts(
            optimizer=self.trainer.optimizer,
            T_0=self.swa_freq,
            eta_min=self.eta_min,
        )
◆ eta_min
| fastreid.engine.hooks.SWA.eta_min |
◆ lr_sched
| fastreid.engine.hooks.SWA.lr_sched |
◆ scheduler
| fastreid.engine.hooks.SWA.scheduler |
◆ swa_freq
| fastreid.engine.hooks.SWA.swa_freq |
◆ swa_lr_factor
| fastreid.engine.hooks.SWA.swa_lr_factor |
◆ swa_start
| fastreid.engine.hooks.SWA.swa_start |
The documentation for this class was generated from the following file: