-
Notifications
You must be signed in to change notification settings - Fork 16
Expand file tree
/
Copy pathutils.py
More file actions
103 lines (84 loc) · 3.7 KB
/
utils.py
File metadata and controls
103 lines (84 loc) · 3.7 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
import torch.nn as nn
import torch
import numpy as np
class MarginLoss(nn.Module):
    """Hinge-style (CW-like) margin loss over a batch of logits.

    Untargeted mode (``target=False``): ``label`` is a tensor with one true
    class index per row; the loss pushes the true logit above the best
    competing logit by at least ``margin``.
    Targeted mode (``target=True``): ``label`` is a single int shared by the
    whole batch; the loss pushes the target logit above the best other logit
    by at least ``margin``.
    Returns the mean margin over the batch (clipped below at ``-margin``).
    """

    def __init__(self, margin=1.0, target=False):
        super(MarginLoss, self).__init__()
        self.margin = margin
        self.target = target

    def forward(self, logits, label):
        if not self.target:
            # Boolean mask of the true class per row.  torch.bool replaces
            # the deprecated torch.uint8 mask dtype; indexing behavior is
            # identical.
            one_hot = torch.zeros_like(logits, dtype=torch.bool)
            label = label.reshape(-1, 1)
            one_hot.scatter_(1, label, 1)
            # True-class logit minus the best competing logit, per row.
            diff = logits[one_hot] - torch.max(logits[~one_hot].view(len(logits), -1), dim=1)[0]
            margin = torch.nn.functional.relu(diff + self.margin, True) - self.margin
        else:
            # `label` is a plain int here: drop the target column, then
            # compare the best remaining logit against the target logit.
            diff = torch.max(torch.cat((logits[:, :label], logits[:, (label + 1):]), dim=1), dim=1)[0] - logits[:, label]
            margin = torch.nn.functional.relu(diff + self.margin, True) - self.margin
        return margin.mean()
class MarginLoss_Single(nn.Module):
    """Hinge-style margin loss where one int ``label`` applies to the whole batch.

    Untargeted mode (``target=False``): pushes the logit at ``label`` above
    the best other logit by at least ``margin``.
    Targeted mode (``target=True``): pushes the best non-``label`` logit
    above the ``label`` logit by at least ``margin``.
    Returns the batch-mean margin (clipped below at ``-margin``).
    """

    def __init__(self, margin=1.0, target=False):
        super(MarginLoss_Single, self).__init__()
        self.margin = margin
        self.target = target

    def forward(self, logits, label):
        if self.target:
            # Best competitor among the non-target columns vs. the target column.
            competitors = torch.cat((logits[:, :label], logits[:, (label + 1):]), dim=1)
            gap = torch.max(competitors, dim=1)[0] - logits[:, label]
        else:
            # Drop the true-class column; the boundary labels avoid an
            # unnecessary concatenation with an empty slice.
            last = logits.size()[1] - 1
            if label == 0:
                competitors = logits[:, (label + 1):]
            elif label == last:
                competitors = logits[:, :label]
            else:
                competitors = torch.cat((logits[:, :label], logits[:, (label + 1):]), dim=1)
            gap = logits[:, label] - torch.max(competitors, dim=1)[0]
        hinged = torch.nn.functional.relu(gap + self.margin, True) - self.margin
        return hinged.mean()
class Function(nn.Module):
    """Wraps a model with batched evaluation, a margin loss, and a query counter.

    Typically used in black-box attack loops: every `forward` call counts the
    model queries it issued; `new_counter` archives a finished run's total and
    `get_average` averages totals below a query budget.
    """

    def __init__(self, model, batch_size=256, margin=0, nlabels=10, target=False):
        super(Function, self).__init__()
        self.model = model
        self.margin = margin
        self.target = target
        self.batch_size = batch_size
        self.current_counts = 0  # queries issued in the current run
        self.counts = []         # archived per-run query totals
        self.nlabels = nlabels

    def _loss(self, logits, label):
        """Per-sample margin loss; `label` is a single int for the whole batch."""
        if not self.target:
            # Drop the true-class column (boundary labels avoid concatenating
            # with an empty slice).
            if label == 0:
                logits_cat = logits[:, (label + 1):]
            elif label == logits.size()[1] - 1:
                logits_cat = logits[:, :label]
            else:
                logits_cat = torch.cat((logits[:, :label], logits[:, (label + 1):]), dim=1)
            diff = logits[:, label] - torch.max(logits_cat, dim=1)[0]
            margin = torch.nn.functional.relu(diff + self.margin, True) - self.margin
        else:
            diff = torch.max(torch.cat((logits[:, :label], logits[:, (label + 1):]), dim=1), dim=1)[0] - logits[:, label]
            margin = torch.nn.functional.relu(diff + self.margin, True) - self.margin
        return margin

    def forward(self, images, label):
        """Evaluate the model in mini-batches; return (logits, per-sample loss)."""
        if len(images.size()) == 3:
            images = images.unsqueeze(0)  # promote a single CHW image to a batch
        n = len(images)
        device = images.device
        k = 0
        loss = torch.zeros(n, dtype=torch.float32, device=device)
        logits = torch.zeros((n, self.nlabels), dtype=torch.float32, device=device)
        while k < n:
            start = k
            end = min(k + self.batch_size, n)
            logits[start:end] = self.model(images[start:end])
            # BUG FIX: compute the loss on this mini-batch's logits only.
            # The original passed the full `logits` buffer, which raises a
            # shape-mismatch error whenever n > batch_size.
            loss[start:end] = self._loss(logits[start:end], label)
            k = end
        self.current_counts += n
        return logits, loss

    def new_counter(self):
        """Archive the current run's query count and reset it to zero."""
        self.counts.append(self.current_counts)
        self.current_counts = 0

    def get_average(self, iter=50000):
        """Mean query count over runs that used fewer than `iter` queries.

        (`iter` shadows the builtin; the name is kept for caller compatibility.
        Returns NaN if no run is below the threshold.)
        """
        counts = np.array(self.counts)
        return np.mean(counts[counts < iter])