I am able to log the loss at every epoch using a Keras Callback, after referring to this answer. Is there any way I can compute the confusion matrix and use it as a metric?
Update I have tried to define the following function to return me the confusion matrix, but this is still not working.
def con_mat(y_true, y_pred):
    """Batch-wise ratio TP / ((TP + FN) + (TP + FP)).

    Note this equals F1/2, not a confusion-matrix entry: doubling the
    returned value gives the F1 score.

    Fix: added ``K.epsilon()`` to the denominator — a batch containing no
    true positives and no predicted positives previously divided by zero
    and produced NaN, which then poisoned the epoch-level metric average.
    """
    total_correct_true = K.sum(K.round(K.clip(y_true * y_pred, 0, 1)))  # TP
    total_true = K.sum(y_true)               # TP + FN (all actual positives)
    predicted_true = K.sum(K.round(y_pred))  # TP + FP (all predicted positives)
    return total_correct_true / (total_true + predicted_true + K.epsilon())
I think the logic for True positive is correct, but it is not working as expected.
Simply pass the following functions to the `metrics` argument of `model.compile`:
from keras import backend as K
def recall_m(y_true, y_pred):
    """Batch-wise recall (true-positive rate): TP / (TP + FN)."""
    tp = K.sum(K.round(K.clip(y_true * y_pred, 0, 1)))
    actual_pos = K.sum(K.round(K.clip(y_true, 0, 1)))
    # epsilon keeps the division finite when the batch has no positives
    return tp / (actual_pos + K.epsilon())
def precision_m(y_true, y_pred):
    """Batch-wise precision: TP / (TP + FP)."""
    tp = K.sum(K.round(K.clip(y_true * y_pred, 0, 1)))
    predicted_pos = K.sum(K.round(K.clip(y_pred, 0, 1)))
    # epsilon keeps the division finite when nothing was predicted positive
    return tp / (predicted_pos + K.epsilon())
def f1_m(y_true, y_pred):
    """Batch-wise F1 score: harmonic mean of precision and recall."""
    p = precision_m(y_true, y_pred)
    r = recall_m(y_true, y_pred)
    return 2 * ((p * r) / (p + r + K.epsilon()))
def TP(y_true, y_pred):
    """Fraction of the batch that are true positives: TP / N."""
    correct_pos = K.sum(K.round(K.clip(y_true * y_pred, 0, 1)))  # TP count
    label_pos = K.round(K.clip(y_true, 0, 1))
    # total sample count N = positives + negatives
    total = K.sum(label_pos) + K.sum(1 - label_pos)
    return correct_pos / total
def TN(y_true, y_pred):
    """Fraction of the batch that are true negatives: TN / N."""
    label_pos = K.round(K.clip(y_true, 0, 1))
    label_neg = 1 - label_pos
    # total sample count N = positives + negatives
    total = K.sum(label_pos) + K.sum(label_neg)
    pred_neg = 1 - K.round(K.clip(y_pred, 0, 1))
    # negative label AND negative prediction -> true negative
    correct_neg = K.sum(K.round(K.clip(label_neg * pred_neg, 0, 1)))
    return correct_neg / total
def FP(y_true, y_pred):
    """Fraction of the batch that are false positives: FP / N.

    Fix: the false-positive count was stored in a local named ``tn``,
    which is misleading and inconsistent with the sibling ``TN`` metric;
    renamed to ``fp``. Behavior is unchanged.
    """
    y_pos = K.round(K.clip(y_true, 0, 1))  # ground-truth positives (0/1)
    y_neg = 1 - y_pos                      # ground-truth negatives (0/1)
    n = K.sum(y_pos) + K.sum(y_neg)        # total sample count N
    # positive prediction on a true-negative sample -> false positive
    fp = K.sum(K.round(K.clip(y_neg * y_pred, 0, 1)))
    return fp / n
def FN(y_true, y_pred):
    """Fraction of the batch that are false negatives: FN / N.

    Fix: the false-negative count was stored in a local named ``tn``,
    which is misleading and inconsistent with the sibling ``TN`` metric;
    renamed to ``fn``. Behavior is unchanged.
    """
    y_pos = K.round(K.clip(y_true, 0, 1))  # ground-truth positives (0/1)
    y_neg = 1 - y_pos                      # ground-truth negatives (0/1)
    n = K.sum(y_pos) + K.sum(y_neg)        # total sample count N
    y_pred_neg = 1 - K.round(K.clip(y_pred, 0, 1))
    # negative prediction on a true-positive sample -> false negative
    fn = K.sum(K.round(K.clip(y_true * y_pred_neg, 0, 1)))
    return fn / n
Then,
# Register the custom metric functions alongside accuracy; Keras will
# evaluate each one per batch and report the epoch-level average.
# NOTE(review): `model`, `optimizers`, and `lr` are defined outside this
# snippet — presumably a built Keras model, `keras.optimizers`, and a
# learning-rate float; confirm against the full script.
model.compile(loss='binary_crossentropy',
optimizer=optimizers.RMSprop(lr=lr),
metrics=['accuracy',f1_m,precision_m, recall_m, TP, TN, FP, FN])
B"H
Here is a great example that I found
https://github.com/chasingbob/keras-visuals/blob/master/visual_callbacks.py
If you found this helpful, you can donate to us via PayPal or buy me a coffee so we can maintain and grow. Thank you!
Donate Us With