def __init__(self, cfg):
    """Assemble the embedding head described by *cfg*.

    Builds three sub-modules from ``cfg.MODEL.BACKBONE`` / ``cfg.MODEL.HEADS``:

    * ``self.pool_layer``  -- global pooling selected by ``POOL_LAYER``
    * ``self.bottleneck``  -- ``nn.Sequential`` holding an optional 1x1 conv
      (when ``EMBEDDING_DIM > 0``) and an optional norm neck
      (when ``WITH_BNNECK``); may be empty
    * ``self.classifier``  -- logit layer selected by ``CLS_LAYER``

    Also stores ``self.neck_feat`` (which feature the head returns at test
    time) and applies the project weight-initialization helpers.

    Raises:
        KeyError: if ``POOL_LAYER`` or ``CLS_LAYER`` names an unknown option.
    """
    super().__init__()

    feat_dim      = cfg.MODEL.BACKBONE.FEAT_DIM
    embedding_dim = cfg.MODEL.HEADS.EMBEDDING_DIM
    num_classes   = cfg.MODEL.HEADS.NUM_CLASSES
    neck_feat     = cfg.MODEL.HEADS.NECK_FEAT
    pool_type     = cfg.MODEL.HEADS.POOL_LAYER
    cls_type      = cfg.MODEL.HEADS.CLS_LAYER
    with_bnneck   = cfg.MODEL.HEADS.WITH_BNNECK
    norm_type     = cfg.MODEL.HEADS.NORM

    # Zero-argument factories: only the selected pooling layer is ever built.
    pool_factories = {
        'fastavgpool': FastGlobalAvgPool2d,
        'avgpool':     lambda: nn.AdaptiveAvgPool2d(1),
        'maxpool':     lambda: nn.AdaptiveMaxPool2d(1),
        'gempoolP':    GeneralizedMeanPoolingP,
        'gempool':     GeneralizedMeanPooling,
        'avgmaxpool':  AdaptiveAvgMaxPool2d,
        'clipavgpool': ClipGlobalAvgPool2d,
        'identity':    nn.Identity,
        'flatten':     Flatten,
    }
    if pool_type not in pool_factories:
        raise KeyError(f"{pool_type} is not supported!")
    self.pool_layer = pool_factories[pool_type]()

    self.neck_feat = neck_feat

    # Optional embedding reduction followed by an optional BN neck.
    neck_layers = []
    if embedding_dim > 0:
        neck_layers.append(nn.Conv2d(feat_dim, embedding_dim, 1, 1, bias=False))
        # Downstream classifier must consume the reduced dimension.
        feat_dim = embedding_dim
    if with_bnneck:
        neck_layers.append(get_norm(norm_type, feat_dim, bias_freeze=True))
    self.bottleneck = nn.Sequential(*neck_layers)

    # Factories close over the (possibly reduced) feat_dim from above.
    cls_factories = {
        'linear':        lambda: nn.Linear(feat_dim, num_classes, bias=False),
        'arcSoftmax':    lambda: ArcSoftmax(cfg, feat_dim, num_classes),
        'circleSoftmax': lambda: CircleSoftmax(cfg, feat_dim, num_classes),
        'amSoftmax':     lambda: AMSoftmax(cfg, feat_dim, num_classes),
    }
    if cls_type not in cls_factories:
        raise KeyError(f"{cls_type} is not supported!")
    self.classifier = cls_factories[cls_type]()

    self.bottleneck.apply(weights_init_kaiming)
    self.classifier.apply(weights_init_classifier)