def build_optimizer(cfg, model):
    """Build an optimizer with per-parameter learning rates and weight decays.

    Each trainable parameter gets its own param group so that heads, backbone
    BN layers, and biases can be scaled independently via the config.

    Args:
        cfg: Config node; reads ``cfg.SOLVER.{BASE_LR, WEIGHT_DECAY,
            HEADS_LR_FACTOR, BACKBONE_BN_LR_FACTOR, BIAS_LR_FACTOR,
            WEIGHT_DECAY_BIAS, OPT, MOMENTUM}``.
        model: Module whose ``named_parameters()`` are to be optimized; its
            ``backbone`` attribute is probed by index when a parameter name
            contains ``'backbone.1'``.

    Returns:
        A ``torch.optim`` optimizer instance (class chosen by name from
        ``cfg.SOLVER.OPT``) over one param group per trainable parameter.
        Each group dict carries ``name``, ``params``, ``lr``,
        ``weight_decay`` and ``freeze`` keys; the extra keys are preserved
        by the optimizer and usable by downstream schedulers/freezers.
    """
    params = []
    for key, value in model.named_parameters():
        # Frozen parameters never get a param group.
        if not value.requires_grad:
            continue

        lr = cfg.SOLVER.BASE_LR
        weight_decay = cfg.SOLVER.WEIGHT_DECAY
        if "heads" in key:
            lr *= cfg.SOLVER.HEADS_LR_FACTOR
        else:
            # Backbone BN params are matched by name heuristics:
            # 'bn' covers named BN modules, 'downsample.1' covers the BN in
            # torchvision-style ResNet downsample blocks.
            if 'bn' in key or 'downsample.1' in key:
                lr *= cfg.SOLVER.BACKBONE_BN_LR_FACTOR
            # NOTE(review): the substring 'backbone.1' also matches
            # 'backbone.10', 'backbone.11', ...; the isinstance check on
            # model.backbone[1] guards the intended case but the broader
            # match may still scale unrelated params — confirm against the
            # actual backbone layout before tightening.
            elif 'backbone.1' in key and isinstance(model.backbone[1], nn.BatchNorm2d):
                lr *= cfg.SOLVER.BACKBONE_BN_LR_FACTOR
        if "bias" in key:
            # Biases conventionally get a scaled LR and their own decay.
            lr *= cfg.SOLVER.BIAS_LR_FACTOR
            weight_decay = cfg.SOLVER.WEIGHT_DECAY_BIAS
        params.append({"name": key, "params": [value], "lr": lr,
                       "weight_decay": weight_decay, "freeze": False})

    solver_opt = cfg.SOLVER.OPT

    # SGD is the only torch optimizer here that takes a momentum argument;
    # every group already carries its own lr, so none is passed globally.
    if solver_opt == "SGD":
        opt_fns = getattr(optim, solver_opt)(params, momentum=cfg.SOLVER.MOMENTUM)
    else:
        opt_fns = getattr(optim, solver_opt)(params)

    return opt_fns
37
38