softmax+交叉熵损失函数代码实现
python代码实现
"""Softmax + cross-entropy loss layer.

Input layer: (z1, z2, z3) -> softmax output: (y1, y2, y3), label: (t1, t2, t3).
Backward result: (y1 - t1, y2 - t2, y3 - t3), i.e. the well-known simplification
of the gradient of cross-entropy composed with softmax.
"""
import numpy as np


def softmax(x):
    """Row-wise softmax, numerically stabilized by subtracting the row max.

    Works for a 1-D vector or a 2-D (batch, classes) array.
    """
    # Subtracting the max before exp prevents overflow; it does not change
    # the result because softmax is invariant to a per-row constant shift.
    shifted = x - np.max(x, axis=-1, keepdims=True)
    exp = np.exp(shifted)
    return exp / np.sum(exp, axis=-1, keepdims=True)


def cross_entropy_error(y, t):
    """Mean cross-entropy of predictions ``y`` against labels ``t``.

    ``t`` may be one-hot (same size as ``y``) or integer class indices.
    A single sample (1-D ``y``) is promoted to a batch of one.
    """
    if y.ndim == 1:
        t = t.reshape(1, t.size)
        y = y.reshape(1, y.size)
    # One-hot labels are converted to class indices for fancy indexing below.
    if t.size == y.size:
        t = t.argmax(axis=1)
    batch_size = y.shape[0]
    # 1e-7 guards against log(0) when a predicted probability underflows.
    return -np.sum(np.log(y[np.arange(batch_size), t] + 1e-7)) / batch_size


class SoftmaxWithEntropyLoss(object):
    """Combined softmax activation + cross-entropy loss layer."""

    def __init__(self):
        self.loss = None  # scalar loss from the last forward pass
        self.y = None     # softmax output of the last forward pass
        self.t = None     # labels of the last forward pass (one-hot or indices)

    def forward(self, x, t):
        """Run softmax on ``x``, compute the loss against ``t`` and return it."""
        self.t = t
        self.y = softmax(x)
        self.loss = cross_entropy_error(self.y, self.t)
        # Bug fix: the original computed the loss but never returned it,
        # so callers received None.
        return self.loss

    def backward(self, dout=1):
        """Return dL/dx = (y - t) / batch_size.

        ``dout`` defaults to 1 (loss is the terminal node); kept for API
        compatibility with other layers' backward(dout) signature.
        """
        batch_size = self.t.shape[0]
        if self.t.size == self.y.size:
            # One-hot labels: subtract directly.
            dx = (self.y - self.t) / batch_size
        else:
            # Integer class labels: subtract 1 at each true-class position.
            dx = self.y.copy()
            dx[np.arange(batch_size), self.t] -= 1
            dx = dx / batch_size
        return dx
参考资料
https://blog.csdn.net/qian99/article/details/78046329
《深度学习入门:基于Python的理论与实现》