Safemotion Lib — documentation listing of circle_loss.py
# encoding: utf-8
"""
@author: xingyu liao
@contact: sherlockliao01@gmail.com
"""

import torch
import torch.nn.functional as F
from torch import nn

from fastreid.utils import comm
from .utils import concat_all_gather


def circle_loss(
        embedding: torch.Tensor,
        targets: torch.Tensor,
        margin: float,
        alpha: float, ) -> torch.Tensor:
    """Compute the pairwise circle loss over a batch of embeddings.

    Args:
        embedding: feature matrix of shape (N, D); L2-normalized internally,
            so similarities below are cosine similarities.
        targets: identity labels of shape (N,).
        margin: relaxation margin ``m`` of the circle loss.
        alpha: scale factor applied to the logits (``gamma`` in the paper).

    Returns:
        Scalar loss tensor.
    """
    embedding = nn.functional.normalize(embedding, dim=1)

    # Under distributed training, gather embeddings/labels from every process
    # so each rank computes the loss over the full global batch of pairs.
    if comm.get_world_size() > 1:
        all_embedding = concat_all_gather(embedding)
        all_targets = concat_all_gather(targets)
    else:
        all_embedding = embedding
        all_targets = targets

    # Cosine similarity matrix (rows are unit-norm).
    dist_mat = torch.matmul(all_embedding, all_embedding.t())

    N = dist_mat.size(0)
    is_pos = all_targets.view(N, 1).expand(N, N).eq(all_targets.view(N, 1).expand(N, N).t()).float()

    # Compute the mask which ignores the relevance score of the query to itself
    is_pos = is_pos - torch.eye(N, N, device=is_pos.device)

    # BUGFIX: cast to float for consistency with is_pos; the (1 - is_neg)
    # arithmetic below is invalid on a bool tensor in modern PyTorch.
    is_neg = all_targets.view(N, 1).expand(N, N).ne(all_targets.view(N, 1).expand(N, N).t()).float()

    s_p = dist_mat * is_pos
    s_n = dist_mat * is_neg

    # Self-paced weights: harder pairs (low s_p / high s_n) get larger weight.
    # detach() keeps the weights out of the gradient path.
    alpha_p = torch.clamp_min(-s_p.detach() + 1 + margin, min=0.)
    alpha_n = torch.clamp_min(s_n.detach() + margin, min=0.)
    delta_p = 1 - margin
    delta_n = margin

    # BUGFIX: push masked-out entries (non-positive / non-negative pairs and
    # the diagonal) to a large negative value so they contribute nothing to
    # logsumexp. Multiplicative masking alone left them at logit 0, adding a
    # spurious exp(0) term per masked entry and biasing the loss with the
    # batch size.
    logit_p = - alpha * alpha_p * (s_p - delta_p) + (-99999999.) * (1 - is_pos)
    logit_n = alpha * alpha_n * (s_n - delta_n) + (-99999999.) * (1 - is_neg)

    loss = nn.functional.softplus(torch.logsumexp(logit_p, dim=1) + torch.logsumexp(logit_n, dim=1)).mean()

    return loss