Safemotion Lib
Loading...
Searching...
No Matches
cross_entropy_loss.py
Go to the documentation of this file.
1# encoding: utf-8
2"""
3@author: l1aoxingyu
4@contact: sherlockliao01@gmail.com
5"""
6import torch
7import torch.nn.functional as F
8
9from fastreid.utils.events import get_event_storage
10
11
def log_accuracy(pred_class_logits, gt_classes, topk=(1,)):
    """
    Log classification accuracy metrics to the EventStorage.

    Args:
        pred_class_logits: classification logits of shape (N, C).
        gt_classes: ground-truth class indices of shape (N,).
        topk: k values for which top-k accuracy is computed; only the
            accuracy for the first entry is written to storage under
            the key "cls_accuracy".
    """
    batch_size = pred_class_logits.size(0)
    k_max = max(topk)

    # Indices of the k_max highest-scoring classes per sample, then
    # transposed to (k_max, N) so row j holds the rank-j predictions.
    _, topk_ids = pred_class_logits.topk(k_max, 1, True, True)
    topk_ids = topk_ids.t()
    hits = topk_ids.eq(gt_classes.view(1, -1).expand_as(topk_ids))

    accuracies = []
    for k in topk:
        # A sample counts as correct at k if the target appears in its top-k.
        hits_at_k = hits[:k].view(-1).float().sum(dim=0, keepdim=True)
        accuracies.append(hits_at_k.mul_(1.0 / batch_size))

    get_event_storage().put_scalar("cls_accuracy", accuracies[0])
29
30
def cross_entropy_loss(pred_class_logits, gt_classes, eps, alpha=0.2):
    """
    Cross entropy loss with (optionally adaptive) label smoothing.

    Args:
        pred_class_logits (Tensor): raw classification logits of shape (N, C).
        gt_classes (LongTensor): ground-truth class indices of shape (N,).
        eps (float): label smoothing factor. If ``eps >= 0`` it is used
            directly; if negative, a per-sample adaptive smoothing parameter
            is derived from the softmax confidence of the ground-truth class,
            scaled by ``alpha``.
        alpha (float): scale of the adaptive smoothing branch (used only
            when ``eps < 0``).

    Returns:
        Tensor: scalar loss, averaged over samples with non-zero loss.
    """
    num_classes = pred_class_logits.size(1)

    if eps >= 0:
        smooth_param = eps
    else:
        # Adaptive label smooth regularization: smoothing strength grows
        # with the model's confidence in the ground-truth class.
        soft_label = F.softmax(pred_class_logits, dim=1)
        smooth_param = alpha * soft_label[torch.arange(soft_label.size(0)), gt_classes].unsqueeze(1)

    log_probs = F.log_softmax(pred_class_logits, dim=1)
    with torch.no_grad():
        # Smoothed target distribution: smooth_param mass spread evenly over
        # the C-1 non-target classes, the remainder on the ground-truth class.
        targets = torch.ones_like(log_probs)
        targets *= smooth_param / (num_classes - 1)
        targets.scatter_(1, gt_classes.unsqueeze(1), (1 - smooth_param))

    loss = (-targets * log_probs).sum(dim=1)

    with torch.no_grad():
        # Average only over samples contributing a non-zero loss; the max
        # guards against division by zero when all per-sample losses are 0.
        non_zero_cnt = max(loss.nonzero(as_tuple=False).size(0), 1)

    loss = loss.sum() / non_zero_cnt

    return loss
log_accuracy(pred_class_logits, gt_classes, topk=(1,))
cross_entropy_loss(pred_class_logits, gt_classes, eps, alpha=0.2)