metrics.py
import torch
import torch.nn as nn
import torch.nn.functional as F
import numpy as np
from scipy.ndimage import distance_transform_edt, convolve
""" S-measure / E-measure components --------------------------- """

def Object(pred, gt):
    # Object-level similarity between the prediction and one (binary) region of the ground truth.
    x = np.mean(pred[gt == 1])
    sigma_x = np.std(pred[gt == 1])
    score = 2.0 * x / (x ** 2 + 1 + sigma_x + np.finfo(np.float64).eps)
    return score

def S_Object(pred, gt):
    # Object-aware structural similarity: foreground and background scores
    # weighted by the foreground ratio of the ground truth.
    pred_fg = pred.copy()
    pred_fg[gt != 1] = 0.0
    O_fg = Object(pred_fg, gt)

    pred_bg = 1 - pred.copy()
    pred_bg[gt == 1] = 0.0
    O_bg = Object(pred_bg, 1 - gt)

    u = np.mean(gt)
    Q = u * O_fg + (1 - u) * O_bg
    return Q
def centroid(gt):
    # Centroid of the ground-truth foreground; falls back to the image centre
    # when the mask is empty.
    if np.sum(gt) == 0:
        return gt.shape[0] // 2, gt.shape[1] // 2
    else:
        x, y = np.where(gt == 1)
        return int(np.mean(x).round()), int(np.mean(y).round())

def divide(gt, x, y):
    # Split the map into four quadrants around (x, y) and return each block
    # together with its area weight.
    LT = gt[:x, :y]
    RT = gt[x:, :y]
    LB = gt[:x, y:]
    RB = gt[x:, y:]

    w1 = LT.size / gt.size
    w2 = RT.size / gt.size
    w3 = LB.size / gt.size
    w4 = RB.size / gt.size
    return LT, RT, LB, RB, w1, w2, w3, w4
def ssim(pred, gt):
    # Single-scale structural similarity between one prediction quadrant and
    # the matching ground-truth quadrant.
    x = np.mean(pred)
    y = np.mean(gt)
    N = pred.size

    sigma_x2 = np.sum((pred - x) ** 2 / (N - 1 + np.finfo(np.float64).eps))
    sigma_y2 = np.sum((gt - y) ** 2 / (N - 1 + np.finfo(np.float64).eps))
    sigma_xy = np.sum((pred - x) * (gt - y) / (N - 1 + np.finfo(np.float64).eps))

    alpha = 4 * x * y * sigma_xy
    beta = (x ** 2 + y ** 2) * (sigma_x2 + sigma_y2)

    if alpha != 0:
        Q = alpha / (beta + np.finfo(np.float64).eps)
    elif alpha == 0 and beta == 0:
        Q = 1
    else:
        Q = 0
    return Q

def S_Region(pred, gt):
    # Region-aware structural similarity: SSIM of the four quadrants around
    # the ground-truth centroid, weighted by quadrant area.
    x, y = centroid(gt)
    gt1, gt2, gt3, gt4, w1, w2, w3, w4 = divide(gt, x, y)
    pred1, pred2, pred3, pred4, _, _, _, _ = divide(pred, x, y)
    Q1 = ssim(pred1, gt1)
    Q2 = ssim(pred2, gt2)
    Q3 = ssim(pred3, gt3)
    Q4 = ssim(pred4, gt4)
    Q = Q1 * w1 + Q2 * w2 + Q3 * w3 + Q4 * w4
    return Q
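
# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original file): S_Object and S_Region
# are usually combined into the full structure measure as
#   S = alpha * S_object + (1 - alpha) * S_region,  with alpha = 0.5,
# plus special cases for an all-background or all-foreground ground truth.
# The function name and the alpha default below are assumptions.
# ---------------------------------------------------------------------------
def s_measure_example(pred, gt, alpha=0.5):
    y = np.mean(gt)
    if y == 0:   # ground truth has no foreground
        return 1.0 - np.mean(pred)
    if y == 1:   # ground truth is entirely foreground
        return np.mean(pred)
    return max(0.0, alpha * S_Object(pred, gt) + (1 - alpha) * S_Region(pred, gt))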
def fspecial_gauss(size, sigma):
    """Mimic the 'fspecial' Gaussian MATLAB function."""
    x, y = np.mgrid[-size // 2 + 1:size // 2 + 1, -size // 2 + 1:size // 2 + 1]
    g = np.exp(-((x ** 2 + y ** 2) / (2.0 * sigma ** 2)))
    return g / g.sum()

def AlignmentTerm(pred, gt):
    # Pixel-wise alignment between the mean-centred prediction and ground truth.
    mu_pred = np.mean(pred)
    mu_gt = np.mean(gt)
    align_pred = pred - mu_pred
    align_gt = gt - mu_gt
    align_mat = 2 * (align_gt * align_pred) / (align_gt ** 2 + align_pred ** 2 + np.finfo(np.float64).eps)
    return align_mat

def EnhancedAlighmentTerm(align_mat):
    # Enhanced alignment: maps the alignment matrix from [-1, 1] into [0, 1].
    enhanced = ((align_mat + 1) ** 2) / 4
    return enhanced
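
# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original file): the two alignment
# helpers above are typically averaged into an E-measure style score.
# The function name and the (H * W - 1) normalisation are assumptions.
# ---------------------------------------------------------------------------
def e_measure_example(pred, gt):
    align_mat = AlignmentTerm(pred, gt)
    enhanced = EnhancedAlighmentTerm(align_mat)
    return np.sum(enhanced) / (gt.size - 1 + np.finfo(np.float64).eps)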
""" Loss Functions -------------------------------------- """
class DiceLoss(nn.Module):
def __init__(self, weight=None, size_average=True):
super(DiceLoss, self).__init__()
def forward(self, inputs, targets, smooth=1):
inputs = torch.sigmoid(inputs)
inputs = inputs.view(-1)
targets = targets.view(-1)
intersection = (inputs * targets).sum()
dice = (2. * intersection + smooth) / (inputs.sum() + targets.sum() + smooth)
return 1 - dice
class DiceBCELoss(nn.Module):
def __init__(self, weight=None, size_average=True):
super(DiceBCELoss, self).__init__()
def forward(self, inputs, targets, smooth=1):
inputs = torch.sigmoid(inputs)
inputs = inputs.view(-1)
targets = targets.view(-1)
intersection = (inputs * targets).sum()
dice_loss = 1 - (2. * intersection + smooth) / (inputs.sum() + targets.sum() + smooth)
BCE = F.binary_cross_entropy(inputs, targets, reduction='mean')
Dice_BCE = BCE + dice_loss
return Dice_BCE
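
# ---------------------------------------------------------------------------
# Minimal usage sketch for the Dice losses (illustrative only). The tensor
# shapes are assumptions; both losses expect raw logits because they apply
# sigmoid internally.
# ---------------------------------------------------------------------------
def _dice_loss_usage_example():
    logits = torch.randn(2, 1, 64, 64)                 # raw network outputs
    masks = (torch.rand(2, 1, 64, 64) > 0.5).float()   # binary targets
    return DiceLoss()(logits, masks), DiceBCELoss()(logits, masks)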
class MultiClassBCE(nn.Module):
    def __init__(self, weight=None, size_average=True):
        super().__init__()

    def forward(self, inputs, targets, smooth=1):
        # Sum of per-channel BCE losses. No sigmoid is applied here, so
        # `inputs` must already be probabilities in [0, 1].
        loss = 0.0
        for i in range(inputs.shape[1]):
            yp = inputs[:, i]
            yt = targets[:, i]
            loss = loss + F.binary_cross_entropy(yp, yt, reduction='mean')
        return loss
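
# ---------------------------------------------------------------------------
# Minimal usage sketch for MultiClassBCE (illustrative only). Unlike the Dice
# losses, it applies no sigmoid itself, so inputs must already be
# probabilities; the three-channel shape is an assumption.
# ---------------------------------------------------------------------------
def _multiclass_bce_usage_example():
    probs = torch.sigmoid(torch.randn(2, 3, 64, 64))
    targets = (torch.rand(2, 3, 64, 64) > 0.5).float()
    return MultiClassBCE()(probs, targets)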
""" Metrics ------------------------------------------ """
def precision(y_true, y_pred):
intersection = (y_true * y_pred).sum()
return (intersection + 1e-15) / (y_pred.sum() + 1e-15)
def recall(y_true, y_pred):
intersection = (y_true * y_pred).sum()
return (intersection + 1e-15) / (y_true.sum() + 1e-15)
def F2(y_true, y_pred, beta=2):
p = precision(y_true, y_pred)
r = recall(y_true, y_pred)
return (1 + beta ** 2.) * (p * r) / float(beta ** 2 * p + r + 1e-15)
def dice_score(y_true, y_pred):
return (2 * (y_true * y_pred).sum() + 1e-15) / (y_true.sum() + y_pred.sum() + 1e-15)
def jac_score(y_true, y_pred):
intersection = (y_true * y_pred).sum()
union = y_true.sum() + y_pred.sum() - intersection
return (intersection + 1e-15) / (union + 1e-15)
def mae(y_true, y_pred):
sum = 0
for i in range(len(y_true)):
sum += abs(y_true[i] - y_pred[i])
return sum / len(y_true)
def accuracy(y_true, y_pred):
return np.mean(y_true == y_pred)
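
# ---------------------------------------------------------------------------
# Illustrative driver (not part of the original file): how the NumPy metrics
# above are typically called on flattened, binarised masks. The 0.5 threshold
# and the 64x64 shape are assumptions.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    rng = np.random.default_rng(0)
    gt = (rng.random((64, 64)) > 0.5).astype(np.float64)    # toy ground truth
    prob = rng.random((64, 64))                             # toy probability map
    pred = (prob > 0.5).astype(np.float64)                  # binarised prediction

    print("precision:", precision(gt.ravel(), pred.ravel()))
    print("recall   :", recall(gt.ravel(), pred.ravel()))
    print("F2       :", F2(gt.ravel(), pred.ravel()))
    print("dice     :", dice_score(gt.ravel(), pred.ravel()))
    print("jaccard  :", jac_score(gt.ravel(), pred.ravel()))
    print("MAE      :", mae(gt.ravel(), prob.ravel()))
    print("accuracy :", accuracy(gt.ravel(), pred.ravel()))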