版权声明:本文为博主原创文章,未经博主允许不得转载。 https://blog.csdn.net/ZK_J1994/article/details/81273673
- 0/1损失函数
- 平方损失函数(线性回归)
- logloss损失函数/交叉熵损失函数(逻辑回归)
# -*- coding: utf-8 -*-
"""
二分类logloss计算:
sum y(i) * ln[h(x(i); w, b)] + [1 - y(i)]ln[1 - h(x(i); w, b)]
"""
from sklearn.metrics import log_loss
from math import log
def cal_logloss(y_true, y_pred):
    """Compute the binary-classification log loss (cross entropy).

    Implements  -1/N * sum_i [ y_i*ln(p_i) + (1 - y_i)*ln(1 - p_i) ].

    :param y_true: ground-truth labels; every element must be 0 or 1
    :param y_pred: predicted probabilities in the open interval (0, 1),
                   same length as y_true (log(0) would raise ValueError
                   from math.log)
    :returns: mean negative log-likelihood over all samples (float)
    :raises ValueError: if the inputs are empty, differ in length, or a
                        label is neither 0 nor 1
    """
    # Explicit checks instead of `assert`: assertions are stripped when
    # Python runs with the -O flag, silently disabling validation.
    if len(y_true) != len(y_pred):
        raise ValueError('y_true and y_pred must have the same length')
    if not y_pred:
        raise ValueError('inputs must contain at least one sample')
    loss = 0.0
    for label, prob in zip(y_true, y_pred):
        if label == 1:
            # y*ln(p) with y == 1, so the factor is redundant.
            loss += log(prob)
        elif label == 0:
            # (1-y)*ln(1-p) with y == 0, so the factor is redundant.
            loss += log(1 - prob)
        else:
            raise ValueError('y_true中应该包含0, 1')
    return -loss / len(y_pred)
if __name__ == '__main__':
    # Smoke test: the hand-rolled implementation should agree with
    # sklearn's reference log_loss on the same inputs.
    labels = [1, 0, 1]
    probabilities = [0.9, 0.1, 0.8]
    reference = log_loss(labels, probabilities)
    own = cal_logloss(labels, probabilities)
    print(f'>>>sklearn计算的logloss = {reference}')
    print(f'>>>本程序计算的logloss = {own}')
- hinge损失函数
- 指数损失函数(adaboost分类, GBDT分类)