注:以下示例代码摘自网络,未经实际运行验证,正确性有待确认。
示例一:
import keras.backend as K
from keras import Sequential
from keras.layers import Dense
import numpy as np


def getPrecision(y_true, y_pred):
    """Precision metric: TP / (TP + FP), on binary-rounded tensors.

    False positives are derived indirectly: count all ground-truth
    negatives, subtract the true negatives, and what remains are the
    negatives the model predicted as positive.
    """
    # True positives: label and prediction both round to 1.
    true_pos = K.sum(K.round(K.clip(y_true * y_pred, 0, 1)))
    # Total ground-truth negatives (entries where the label is 0).
    neg_total = (-1) * K.sum(K.round(K.clip(y_true - K.ones_like(y_true), -1, 0)))
    # True negatives: label and prediction both round to 0.
    true_neg = K.sum(K.round(K.clip(
        (y_true - K.ones_like(y_true)) * (y_pred - K.ones_like(y_pred)), 0, 1)))
    # Remaining negatives were predicted positive.
    false_pos = neg_total - true_neg
    # epsilon() guards against division by zero when nothing is predicted positive.
    return true_pos / (true_pos + false_pos + K.epsilon())


def getRecall(y_true, y_pred):
    """Recall metric: TP / (TP + FN), on binary-rounded tensors."""
    # True positives: label and prediction both round to 1.
    true_pos = K.sum(K.round(K.clip(y_true * y_pred, 0, 1)))
    # Total ground-truth positives.
    pos_total = K.sum(K.round(K.clip(y_true, 0, 1)))
    # Positives the model failed to predict.
    false_neg = pos_total - true_pos
    # epsilon() guards against division by zero when there are no positives.
    return true_pos / (true_pos + false_neg + K.epsilon())
# Register the custom metric functions alongside built-in accuracy; Keras
# calls each metric per batch and reports the running mean per epoch.
# NOTE(review): assumes `model`, `getRecall`, and `getPrecision` are defined
# earlier in the script — `model` is not created in this snippet.
model.compile(optimizer="sgd", loss="categorical_crossentropy",metrics=["acc",getRecall,getPrecision])
来源:https://zhuanlan.zhihu.com/p/38080551
示例二:
from keras import backend as K


def Precision(y_true, y_pred):
    """Precision: true positives over all predicted positives."""
    true_positives = K.sum(K.round(K.clip(y_true * y_pred, 0, 1)))
    predicted_positives = K.sum(K.round(K.clip(y_pred, 0, 1)))
    # epsilon() avoids division by zero when nothing is predicted positive.
    return true_positives / (predicted_positives + K.epsilon())


def Recall(y_true, y_pred):
    """Recall: true positives over all ground-truth positives."""
    true_positives = K.sum(K.round(K.clip(y_true * y_pred, 0, 1)))
    possible_positives = K.sum(K.round(K.clip(y_true, 0, 1)))
    # epsilon() avoids division by zero when there are no positives.
    return true_positives / (possible_positives + K.epsilon())


def F1(y_true, y_pred):
    """F1-score: harmonic mean of Precision and Recall."""
    p = Precision(y_true, y_pred)
    r = Recall(y_true, y_pred)
    return 2 * ((p * r) / (p + r + K.epsilon()))
来源:https://blog.csdn.net/joleoy/article/details/85787457
本站文章如无特殊说明,均为本站原创,如若转载,请注明出处:keras自定义评价函数 - Python技术站