    for key, value in model.named_parameters():
        if not value.requires_grad:
            continue
        lr = cfg.SOLVER.BASE_LR
        weight_decay = cfg.SOLVER.WEIGHT_DECAY
        # Head parameters get their own learning-rate factor.
        if "heads" in key:
            lr *= cfg.SOLVER.HEADS_LR_FACTOR
        # Backbone BatchNorm parameters (including the BN in residual
        # downsample branches) get a separate learning-rate factor.
        if "bn" in key or "downsample.1" in key:
            lr *= cfg.SOLVER.BACKBONE_BN_LR_FACTOR
        elif "backbone.1" in key and isinstance(model.backbone[1], nn.BatchNorm2d):
            lr *= cfg.SOLVER.BACKBONE_BN_LR_FACTOR
        # Bias terms use their own learning-rate factor and weight decay.
        if "bias" in key:
            lr *= cfg.SOLVER.BIAS_LR_FACTOR
            weight_decay = cfg.SOLVER.WEIGHT_DECAY_BIAS
        params += [{
            "name": key,
            "params": [value],
            "lr": lr,
            "weight_decay": weight_decay,
            "freeze": False,
        }]

    solver_opt = cfg.SOLVER.OPT
    # Only SGD takes the extra momentum argument.
    if solver_opt == "SGD":
        opt_fns = getattr(optim, solver_opt)(params, momentum=cfg.SOLVER.MOMENTUM)
    else:
        opt_fns = getattr(optim, solver_opt)(params)
    return opt_fns
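
# Sketch (illustrative, not from the file above): what the per-parameter groups
# produced by the loop look like, and how the getattr(optim, ...) dispatch
# consumes them. The toy model, the numeric values, and the simple "bias" rule
# are assumptions for demonstration only.
import torch.nn as nn
import torch.optim as optim

toy = nn.Sequential(nn.Conv2d(3, 8, 3), nn.BatchNorm2d(8))
groups = []
for name, p in toy.named_parameters():
    lr, wd = 0.01, 5e-4
    if "bias" in name:  # bias terms: scaled lr, no weight decay
        lr, wd = lr * 2.0, 0.0
    groups.append({"name": name, "params": [p], "lr": lr, "weight_decay": wd})

# Every group carries its own "lr"/"weight_decay", so only optimizer-wide
# arguments (e.g. momentum for SGD) are passed at construction time.
opt = getattr(optim, "SGD")(groups, momentum=0.9)
print([(g["name"], g["lr"]) for g in opt.param_groups])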
41 "optimizer": optimizer,
44 "warmup_factor": cfg.SOLVER.WARMUP_FACTOR,
45 "warmup_iters": cfg.SOLVER.WARMUP_ITERS,
46 "warmup_method": cfg.SOLVER.WARMUP_METHOD,
49 "milestones": cfg.SOLVER.STEPS,
50 "gamma": cfg.SOLVER.GAMMA,
53 "max_iters": cfg.SOLVER.MAX_ITER,
54 "delay_iters": cfg.SOLVER.DELAY_ITERS,
55 "eta_min_lr": cfg.SOLVER.ETA_MIN_LR,
58 return getattr(lr_scheduler, cfg.SOLVER.SCHED)(**scheduler_args)
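
# Sketch (illustrative): the same name-based dispatch, shown against PyTorch's
# built-in torch.optim.lr_scheduler instead of the project's own lr_scheduler
# module (which additionally understands warmup_*, delay_iters and eta_min_lr).
# "MultiStepLR", the milestones and gamma below stand in for cfg.SOLVER.SCHED,
# cfg.SOLVER.STEPS and cfg.SOLVER.GAMMA.
import torch.nn as nn
import torch.optim as optim
from torch.optim import lr_scheduler as torch_lr_scheduler

opt = optim.SGD(nn.Linear(4, 4).parameters(), lr=0.01, momentum=0.9)
sched = getattr(torch_lr_scheduler, "MultiStepLR")(optimizer=opt, milestones=[30, 55], gamma=0.1)

for _ in range(60):           # one scheduler step per training iteration
    opt.step()
    sched.step()
print(sched.get_last_lr())    # lr has decayed twice: 0.01 -> 0.001 -> 0.0001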