Safemotion Lib
Loading...
Searching...
No Matches
embedding_head.py
Go to the documentation of this file.
1# encoding: utf-8
2"""
3@author: liaoxingyu
4@contact: sherlockliao01@gmail.com
5"""
6
7import torch.nn.functional as F
8from torch import nn
9
10from fastreid.layers import *
11from fastreid.utils.weight_init import weights_init_kaiming, weights_init_classifier
12from .build import REID_HEADS_REGISTRY
13
14
@REID_HEADS_REGISTRY.register()
class EmbeddingHead(nn.Module):
    """Standard re-ID embedding head: global pooling -> optional bottleneck
    (1x1 conv reduction and/or BN neck) -> identity classifier.

    At evaluation time only the post-bottleneck embedding is returned; at
    training time the classifier outputs and the selected neck feature are
    returned as a dict for the loss computation.
    """

    def __init__(self, cfg):
        super().__init__()

        feat_dim = cfg.MODEL.BACKBONE.FEAT_DIM
        embedding_dim = cfg.MODEL.HEADS.EMBEDDING_DIM
        num_classes = cfg.MODEL.HEADS.NUM_CLASSES
        neck_feat = cfg.MODEL.HEADS.NECK_FEAT
        pool_type = cfg.MODEL.HEADS.POOL_LAYER
        cls_type = cfg.MODEL.HEADS.CLS_LAYER
        with_bnneck = cfg.MODEL.HEADS.WITH_BNNECK
        norm_type = cfg.MODEL.HEADS.NORM

        # Factories (not instances) so only the configured pooling layer is
        # ever constructed, matching the original if/elif dispatch.
        pool_factories = {
            'fastavgpool': FastGlobalAvgPool2d,
            'avgpool': lambda: nn.AdaptiveAvgPool2d(1),
            'maxpool': lambda: nn.AdaptiveMaxPool2d(1),
            'gempoolP': GeneralizedMeanPoolingP,
            'gempool': GeneralizedMeanPooling,
            'avgmaxpool': AdaptiveAvgMaxPool2d,
            'clipavgpool': ClipGlobalAvgPool2d,
            'identity': nn.Identity,
            'flatten': Flatten,
        }
        if pool_type not in pool_factories:
            raise KeyError(f"{pool_type} is not supported!")
        self.pool_layer = pool_factories[pool_type]()

        self.neck_feat = neck_feat

        # Bottleneck: optional 1x1 conv dimensionality reduction, then an
        # optional (frozen-bias) norm layer. Empty Sequential acts as identity.
        neck_modules = []
        if embedding_dim > 0:
            neck_modules.append(nn.Conv2d(feat_dim, embedding_dim, 1, 1, bias=False))
            feat_dim = embedding_dim
        if with_bnneck:
            neck_modules.append(get_norm(norm_type, feat_dim, bias_freeze=True))
        self.bottleneck = nn.Sequential(*neck_modules)

        # Identity classification layer; lambdas defer construction so only
        # the configured classifier is built (feat_dim here is the value
        # after any embedding reduction above).
        cls_factories = {
            'linear': lambda: nn.Linear(feat_dim, num_classes, bias=False),
            'arcSoftmax': lambda: ArcSoftmax(cfg, feat_dim, num_classes),
            'circleSoftmax': lambda: CircleSoftmax(cfg, feat_dim, num_classes),
            'amSoftmax': lambda: AMSoftmax(cfg, feat_dim, num_classes),
        }
        if cls_type not in cls_factories:
            raise KeyError(f"{cls_type} is not supported!")
        self.classifier = cls_factories[cls_type]()

        self.bottleneck.apply(weights_init_kaiming)
        self.classifier.apply(weights_init_classifier)

    def forward(self, features, targets=None):
        """
        See :class:`ReIDHeads.forward`.
        """
        global_feat = self.pool_layer(features)
        bn_feat = self.bottleneck(global_feat)[..., 0, 0]

        # Evaluation: the bare embedding is the output.
        if not self.training:
            return bn_feat

        # Training: plain Linear takes no targets; margin-based softmax
        # variants consume targets and expose a scale factor `s`.
        if type(self.classifier).__name__ == 'Linear':
            cls_outputs = self.classifier(bn_feat)
            pred_class_logits = F.linear(bn_feat, self.classifier.weight)
        else:
            cls_outputs = self.classifier(bn_feat, targets)
            pred_class_logits = self.classifier.s * F.linear(
                F.normalize(bn_feat), F.normalize(self.classifier.weight))

        if self.neck_feat == "before":
            feat = global_feat[..., 0, 0]
        elif self.neck_feat == "after":
            feat = bn_feat
        else:
            raise KeyError(f"{self.neck_feat} is invalid for MODEL.HEADS.NECK_FEAT")

        return {
            "cls_outputs": cls_outputs,
            "pred_class_logits": pred_class_logits,
            "features": feat,
        }