import numpy as np
import torch.nn as nn
import torch
# https://blog.csdn.net/QLBFA/article/details/107536486
def cross_entropy_error(y, t):
    """Return the cross-entropy -sum(t * log(y + delta)) between two arrays.

    Args:
        y: predicted probabilities (e.g. softmax output), array-like.
        t: target distribution (e.g. one-hot labels), same shape as ``y``.

    Returns:
        float: the (un-averaged) cross-entropy over all elements.
    """
    # Tiny offset prevents -inf from np.log(0) when a predicted prob is 0.
    delta = 1e-7
    return -np.sum(t * np.log(y + delta))
# Logit scores for 10 classes; target is a one-hot vector for class 2.
predict = torch.Tensor([[0.1, 0.05, 0.6, 0.0, 0.05, 0.1, 0.0, 0.1, 0.0, 0.0]])
label = torch.tensor([[0.0, 0, 1, 0, 0, 0, 0, 0, 0, 0]])

# Manual NumPy path: softmax the logits first, then take cross-entropy.
softmax = nn.Softmax(dim=-1)
lossinput = softmax(predict)
# Use .numpy() for an explicit tensor->ndarray conversion.
# Prints ~1.8195 (= -log(softmax(predict)[2])); the previously claimed
# 0.510825457099338 is -log(0.6 + 1e-7), i.e. what you get WITHOUT softmax.
print(cross_entropy_error(lossinput.numpy(), label.numpy()))

# PyTorch path: CrossEntropyLoss applies log-softmax internally, so it is fed
# the raw logits. Passing a probability-vector target (rather than a class
# index) requires torch >= 1.10. Both prints should agree (~1.8195).
cross_loss = nn.CrossEntropyLoss()
print(cross_loss(predict, label))