def categorical_focal_loss(alpha=0.25, gamma=2.0):
-    """
-    https://github.com/umbertogriffo/focal-loss-keras
-
-    Softmax version of focal loss.
-    When there is a skew between the different categories/labels in your data set,
-    you can try applying this function as the loss.
-           m
-      FL = ∑  -alpha * (1 - p_o,c)^gamma * y_o,c * log(p_o,c)
-          c=1
-
-    where m = number of classes, c = class, and o = observation.
-
-    Parameters:
-      alpha -- the same as the weighting factor in balanced cross-entropy; the size
-        of the array needs to be consistent with the number of classes.
-      gamma -- focusing parameter for the modulating factor (1 - p).
-
-    Default values:
-      gamma -- 2.0, as mentioned in the paper
-      alpha -- 0.25, as mentioned in the paper
-
-    References:
-      Official paper: https://arxiv.org/pdf/1708.02002.pdf
-      https://www.tensorflow.org/api_docs/python/tf/keras/backend/categorical_crossentropy
-
-    Usage:
-      model.compile(loss=[categorical_focal_loss(alpha=.25, gamma=2)], metrics=["accuracy"], optimizer=adam)
-    """
-
-    # def categorical_focal_loss_fixed(y_true, y_pred):
-
    def focal_loss_fixed(y_true, y_pred):
        pt_1 = tf.where(tf.equal(y_true, 1), y_pred, tf.ones_like(y_pred))
        pt_0 = tf.where(tf.equal(y_true, 0), y_pred, tf.zeros_like(y_pred))
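For reference, the formula in the docstring removed above is a per-class weighted cross-entropy. Below is a minimal sketch of that softmax focal loss against the Keras backend; the function name, the clipping, and the mean-over-batch reduction are assumptions of mine, not part of this commit.

from tensorflow.keras import backend as K

def categorical_focal_loss_sketch(alpha=0.25, gamma=2.0):
    # Sketch of FL = sum_c -alpha * (1 - p_o,c)^gamma * y_o,c * log(p_o,c)
    def loss(y_true, y_pred):
        # clip so the log never sees exactly 0 or 1
        y_pred = K.clip(y_pred, K.epsilon(), 1.0 - K.epsilon())
        cross_entropy = -y_true * K.log(y_pred)
        weight = alpha * K.pow(1.0 - y_pred, gamma)
        # sum over classes, mean over the batch (assumed reduction)
        return K.mean(K.sum(weight * cross_entropy, axis=-1))
    return loss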
@@ -43,33 +12,6 @@ def focal_loss_fixed(y_true, y_pred):
        return -K.mean(
            alpha * K.pow(1.0 - pt_1, gamma) * K.log(pt_1 + K.epsilon())
        ) - K.mean((1 - alpha) * K.pow(pt_0, gamma) * K.log(1.0 - pt_0 + K.epsilon()))

    return focal_loss_fixed
- # """
47
- # :param y_true: A tensor of the same shape as `y_pred`
48
- # :param y_pred: A tensor resulting from a softmax
49
- # :return: Output tensor.
50
- # """
51
- # y_true = tf.cast(y_true, tf.float32)
52
- # # Define epsilon so that the back-propagation will not result in NaN for 0 divisor case
53
- # epsilon = K.epsilon()
54
- # # Add the epsilon to prediction value
55
- # # y_pred = y_pred + epsilon
56
- # # Clip the prediciton value
57
- # y_pred = K.clip(y_pred, epsilon, 1.0 - epsilon)
58
- # # Calculate p_t
59
- # p_t = tf.where(K.equal(y_true, 1), y_pred, 1 - y_pred)
60
- # # Calculate alpha_t
61
- # alpha_factor = K.ones_like(y_true) * alpha
62
- # alpha_t = tf.where(K.equal(y_true, 1), alpha_factor, 1 - alpha_factor)
63
- # # Calculate cross entropy
64
- # cross_entropy = -K.log(p_t)
65
- # weight = alpha_t * K.pow((1 - p_t), gamma)
66
- # # Calculate focal loss
67
- # loss = weight * cross_entropy
68
- # # Sum the losses in mini_batch
69
- # loss = K.mean(K.sum(loss, axis=-1))
70
- # return loss
71
-
72
- # return tf.keras.losses.BinaryFocalCrossentropy(alpha=alpha, gamma=gamma)
73


def dice_coef(y_true, y_pred):
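Aside: the comment removed in the hunk above points at `tf.keras.losses.BinaryFocalCrossentropy`, which is a real built-in in recent TensorFlow (the loss landed around TF 2.9; the `apply_class_balancing`/`alpha` arguments around TF 2.10). A minimal usage sketch, assuming an already-built Keras `model`:

import tensorflow as tf

# alpha is only applied when apply_class_balancing=True (TF >= 2.10)
focal = tf.keras.losses.BinaryFocalCrossentropy(
    apply_class_balancing=True, alpha=0.25, gamma=2.0
)
model.compile(optimizer="adam", loss=focal, metrics=["accuracy"])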
@@ -145,50 +87,3 @@ def loss(y_true, y_pred):
        return dice + focal_loss

    return loss
-
-
-def dyn_weighted_bincrossentropy(true, pred):
-    """
-    Calculates weighted binary cross-entropy. The weights are determined dynamically
-    by the balance of each category, and are recalculated for each batch.
-
-    The weights are calculated by counting the 'pos' and 'neg' classes in the
-    true labels, then dividing by the total number of predictions.
-
-    For example, if there is 1 pos example and 99 neg examples, the weights are
-    1/100 and 99/100. Applying these weights means false negatives are weighted
-    99/100, while false positives are weighted 1/100. This prevents the classifier
-    from labeling everything negative and getting 99% accuracy.
-
-    This can be useful for unbalanced categories.
-    """
-    # get the total number of inputs
-    num_pred = K.sum(K.cast(pred < 0.5, true.dtype)) + K.sum(true)
-
-    # weight applied to the 'neg' (zero) values
-    zero_weight = K.sum(true) / num_pred + K.epsilon()
-
-    # weight applied to the 'pos' (one) values
-    one_weight = K.sum(K.cast(pred < 0.5, true.dtype)) / num_pred + K.epsilon()
-
-    # calculate the weight vector
-    weights = (1.0 - true) * zero_weight + true * one_weight
-
-    # calculate the binary cross-entropy
-    bin_crossentropy = K.binary_crossentropy(true, pred)
-
-    # apply the weights
-    weighted_bin_crossentropy = weights * bin_crossentropy
-
-    return K.mean(weighted_bin_crossentropy)
-
-
-def dice_coef_nosq(y_true, y_pred):
-    smooth = K.epsilon()
-    y_true_f = K.flatten(y_true)
-    y_pred_f = K.flatten(y_pred)
-    intersection = K.sum(y_true_f * y_pred_f)
-    dice = (2.0 * intersection + smooth) / (
-        K.sum(K.square(y_true_f)) + K.sum(K.square(y_pred_f)) + smooth
-    )
-    return dice
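Note on the deleted helpers: `dyn_weighted_bincrossentropy` counts negatives from `pred` rather than from `true`, so the realized weights can drift from the 1/100 vs. 99/100 scheme its docstring describes, and `dice_coef_nosq` still squares its inputs despite the "nosq" name. Below is a minimal sketch of the weighting the docstring actually describes, derived from the true labels alone; the function name and the `K.ones_like`-based element count are assumptions of mine, not part of this commit.

from tensorflow.keras import backend as K

def dyn_weighted_bce_sketch(y_true, y_pred):
    # total number of labels in the batch
    total = K.sum(K.ones_like(y_true))
    pos = K.sum(y_true)
    # rare positives get the large weight (e.g. 99/100), negatives the small one
    one_weight = (total - pos) / total + K.epsilon()
    zero_weight = pos / total + K.epsilon()
    weights = y_true * one_weight + (1.0 - y_true) * zero_weight
    return K.mean(weights * K.binary_crossentropy(y_true, y_pred))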