import torch
import torch.nn as nn
def CrossEntropyLoss_label_smooth(outputs, targets, num_classes=3, epsilon=0.2):
    """Cross-entropy loss with label smoothing.

    The one-hot target distribution is softened: the true class gets
    probability ``1 - epsilon`` and the remaining ``epsilon`` is spread
    uniformly over the other ``num_classes - 1`` classes.

    Args:
        outputs: Raw (unnormalized) logits of shape (N, num_classes).
        targets: Long tensor of class indices, shape (N,).
        num_classes: Total number of classes.
        epsilon: Smoothing factor in [0, 1); 0 recovers standard cross-entropy.

    Returns:
        Scalar tensor: the mean smoothed cross-entropy over the batch.
    """
    N = targets.size(0)
    # Start every entry at epsilon / (num_classes - 1), matching outputs'
    # dtype/device so the multiply below works on GPU / half precision too.
    smoothed_labels = torch.full(
        size=(N, num_classes),
        fill_value=epsilon / (num_classes - 1),
        dtype=outputs.dtype,
        device=outputs.device,
    )
    # Put 1 - epsilon at each row's true-class position.
    smoothed_labels.scatter_(
        dim=1, index=torch.unsqueeze(targets, dim=1), value=1 - epsilon
    )
    # Cross-entropy against the smoothed distribution, averaged over the batch.
    log_prob = nn.functional.log_softmax(outputs, dim=1)
    loss = -torch.sum(log_prob * smoothed_labels) / N
    return loss
if __name__ == '__main__':
    # Demo: random logits for a batch of 8 samples over 3 classes.
    # (torch.FloatTensor(8, 3) would allocate *uninitialized* memory —
    # garbage values, possibly NaN — so use randn for a valid demo input.)
    logits = torch.randn(8, 3)
    label = torch.tensor([2, 1, 1, 1, 2, 0, 0, 0], dtype=torch.long)
    print(CrossEntropyLoss_label_smooth(logits, label))
# Label smoothing + cross-entropy.
# Adapted from: blog.csdn.net/wuxulong123/article/details/121449814