Exploring negative log likelihood loss (nll_loss), cross_entropy, and one-hot encoding

import torch
import numpy as np
import torch.nn.functional as F
import math
from functools import reduce
exp = math.exp
log = math.log

# Set up experiment data
input = torch.tensor([[2.5, -0.5, 0.1],
                        [-1.1, 2.5, 0.0],
                        [1.2, 2.2, 3.1]], dtype=torch.float)
labels = torch.tensor([1, 0, 2], dtype=torch.long)

# Pure-Python implementation of log_softmax
def log_softmax(p):
    m, n = p.size()
    sm_arr = []
    for i in range(m):
        # exponentiate every logit in row i
        exp_xlist = list(map(lambda x: exp(x), p[i]))
        # normalizing constant: sum of exponentials over the row
        tmp_sum = reduce(lambda x, y: x + y, exp_xlist)
        # log of the softmax probability for each class
        tmp_list = [log(x / tmp_sum) for x in exp_xlist]
        tmp_arr = np.array(tmp_list)
        sm_arr.append(tmp_arr)
    sm_arr = np.array(sm_arr)
    return sm_arr
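
# As a sanity check, the hand-rolled log_softmax above should agree with PyTorch's
# built-in F.log_softmax along dim=1. A minimal sketch of that comparison, assuming
# the `input` tensor defined above (`builtin` and `custom` are just local names here):
builtin = F.log_softmax(input, dim=1)
custom = torch.from_numpy(log_softmax(input)).to(torch.float32)
print(torch.allclose(builtin, custom, atol=1e-5))  # should print True if the two agree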

# Method 1: first compute log-softmax with the custom log_softmax function, then call nll_loss to compute the loss
# (1.1) Compute the log-probabilities with the custom log_softmax function
ety = log_softmax(input)
ety = torch.from_numpy(ety).to(torch.float32)

# (1.2) Call nll_loss on the log-probabilities to compute the loss
F.nll_loss(ety,labels)

"""

tensor(2.4258)

"""
# Method 2: call cross_entropy directly; internally this is equivalent to computing log_softmax first and then nll_loss
F.cross_entropy(input,labels)

"""

tensor(2.4258)

"""
# Method 3: one-hot encode the labels first, compute the log-softmax of input, then compute nll_loss from the log-probabilities and the one-hot encoded target
# (3.1) First, one-hot encode the labels
def generate_one_hot_coding(cls_num,gtlab):
    sample_num = len(gtlab)
    one_hot_label = []
    for i in range(sample_num):
        # start from an all-zero row and set the true-class position to 1
        row_label = [0]*cls_num
        row_label[gtlab[i]] = 1
        print(row_label)
        one_hot_label.append(row_label)
    return one_hot_label

cls_num = 3
target = generate_one_hot_coding(cls_num,labels)
"""
[0, 1, 0]
[1, 0, 0]
[0, 0, 1]

"""

# (3.2) Compute nll_loss from the log-probabilities and the one-hot encoded target
def NLL_V1(input,target):
    nll = 0
    m = input.size(0)
    for i in range(input.size(0)):
        for j in range(input.size(1)):
            # one-hot indicator for class j of sample i
            if isinstance(target,torch.Tensor):
                t = target[i][j].numpy()
            else:
                t = target[i][j]
            # negative log-probability of class j for sample i
            h = -1*input[i][j].numpy()
            nll = nll + t*h
    # average over the batch
    nll /= m
    return nll

NLL_V1(ety,target)

"""

2.425835212071737

"""

# Method 4: compute the log-softmax of input, then compute nll_loss directly from the log-probabilities and the integer labels
def NLL_V2(input,target):
    nll = 0
    m = input.size(0)
    for i in range(input.size(0)):
        # integer class label of sample i
        t = target[i]
        # negative log-probability of the true class
        h = -1*input[i][t].numpy()
        nll = nll + h
    # average over the batch
    nll /= m
    return nll

NLL_V2(ety,labels)

"""
2.425835212071737

"""

Reposted from blog.csdn.net/u012751110/article/details/104071609