Theano: log softmax for a 4D tensor

The two helpers below compute a softmax and a log-softmax over the channel axis of a 4D tensor (batch_size, channels, height, width). Both flatten the spatial dimensions first, then subtract the per-location maximum over channels (the usual log-sum-exp shift) so that exp() cannot overflow.
import theano
import theano.tensor as T

def softmax_4d(x_4d):
    """
    Channel-wise softmax.
    x_4d: a 4D tensor (batch_size, channels, height, width).
    """
    shape = x_4d.shape
    # Flatten the spatial dimensions: (batch, channels, height * width).
    x_3d = x_4d.reshape((shape[0], shape[1], -1))
    # Subtract the per-location maximum over channels for numerical stability.
    m = T.max(x_3d, axis=1, keepdims=True)
    rebased_x = x_3d - m
    soft_up = T.exp(rebased_x)
    soft_down = T.sum(soft_up, axis=1, keepdims=True)
    sm = soft_up / soft_down
    return sm.reshape(x_4d.shape)
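As a quick sanity check (a minimal sketch; the test shape and tolerance below are arbitrary choices, not from the original post), the result can be compared against a plain NumPy softmax over the channel axis, and each channel slice should sum to 1:

import numpy as np
import theano
import theano.tensor as T

x = T.tensor4('x')
f_sm = theano.function([x], softmax_4d(x))

x_val = np.random.randn(2, 3, 4, 5).astype(theano.config.floatX)
out = f_sm(x_val)

# NumPy reference over the channel axis (axis=1).
e = np.exp(x_val - x_val.max(axis=1, keepdims=True))
ref = e / e.sum(axis=1, keepdims=True)

print(np.allclose(out, ref, atol=1e-5))               # True
print(np.allclose(out.sum(axis=1), 1.0, atol=1e-5))   # channel probabilities sum to 1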
def log_softmax_4d(x_4d):
    """
    Channel-wise log-softmax.
    x_4d: a 4D tensor (batch_size, channels, height, width).
    """
    shape = x_4d.shape
    # Flatten the spatial dimensions: (batch, channels, height * width).
    x_3d = x_4d.reshape((shape[0], shape[1], -1))
    # Subtract the per-location maximum over channels, then apply the
    # log-sum-exp identity directly so small probabilities never pass
    # through exp() followed by log().
    m = T.max(x_3d, axis=1, keepdims=True)
    rebased_x = x_3d - m
    lsm_3d = rebased_x - T.log(T.sum(T.exp(rebased_x), axis=1, keepdims=True))
    lsm_4d = lsm_3d.reshape(x_4d.shape)
    return lsm_4d
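The point of computing the log-softmax directly, rather than taking T.log(softmax_4d(x)), is numerical range: when the logits are far apart, the plain softmax underflows to 0 for the small entries and its log becomes -inf, while the fused form stays finite. A small sketch (the example values are chosen only to trigger the underflow):

import numpy as np
import theano
import theano.tensor as T

x = T.tensor4('x')
f_lsm = theano.function([x], log_softmax_4d(x))
f_log_sm = theano.function([x], T.log(softmax_4d(x)))

# One sample, three channels, a single spatial location, widely spread logits.
x_val = np.array([1000.0, 0.0, -1000.0],
                 dtype=theano.config.floatX).reshape((1, 3, 1, 1))

print(f_log_sm(x_val).ravel())  # [0., -inf, -inf]   -- log of an underflowed softmax
print(f_lsm(x_val).ravel())     # [0., -1000., -2000.] -- stays finite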
# Reference: https://groups.google.com/forum/#!msg/theano-users/LRmXhTQtKAA/JmdyTzHiAwAJ
Daniel的学习笔记
Master's student (class of 2015) in Computer Science at Zhejiang University; interests: Machine Learning, Deep Learning, Computer Vision.
This blog contains my personal study notes. Given my limited knowledge there are bound to be errors or omissions; if you spot any, please leave a comment. Thanks!