Theano log softmax 4D

import theano.tensor as T


def softmax_4d(x_4d):
    """
    Numerically stable softmax over the channel axis.

    x_4d: a 4D tensor of shape (batch_size, channels, height, width)
    """
    shape = x_4d.shape
    # Flatten the spatial dimensions: (batch_size, channels, height * width).
    x_3d = x_4d.reshape((shape[0], shape[1], -1))
    # Subtract the per-position channel maximum to avoid overflow in exp().
    m = T.max(x_3d, axis=1, keepdims=True)
    rebased_x = x_3d - m
    soft_up = T.exp(rebased_x)
    soft_down = T.sum(soft_up, axis=1, keepdims=True)
    sm = soft_up / soft_down
    return sm.reshape(x_4d.shape)
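
# A minimal usage sketch (the names x, f and the random test array are just
# illustrative, not part of the original post): compile the symbolic graph
# and check that the channel probabilities sum to 1 at every spatial position.
#
# import numpy as np
# import theano
# import theano.tensor as T
#
# x = T.tensor4('x')
# f = theano.function([x], softmax_4d(x))
#
# batch = np.random.rand(2, 3, 4, 5).astype('float32')
# out = f(batch)
# print(out.sum(axis=1))  # expected: all values close to 1.0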

def log_softmax_4d(x_4d):
    """
    Numerically stable log-softmax over the channel axis.

    x_4d: a 4D tensor of shape (batch_size, channels, height, width)
    """
    shape = x_4d.shape
    x_3d = x_4d.reshape((shape[0], shape[1], -1))
    # log softmax(x) = (x - m) - log(sum(exp(x - m))), with m the channel-wise max.
    m = T.max(x_3d, axis=1, keepdims=True)
    rebased_x = x_3d - m
    lsm_3d = rebased_x - T.log(T.sum(T.exp(rebased_x), axis=1, keepdims=True))
    lsm_4d = lsm_3d.reshape(x_4d.shape)
    return lsm_4d
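
# A quick sanity-check sketch (names g and batch are assumptions): exponentiating
# the log-softmax output should recover the softmax output.
#
# import numpy as np
# import theano
# import theano.tensor as T
#
# x = T.tensor4('x')
# g = theano.function([x], [softmax_4d(x), log_softmax_4d(x)])
#
# batch = np.random.rand(2, 3, 4, 5).astype('float32')
# sm, lsm = g(batch)
# print(np.allclose(sm, np.exp(lsm), atol=1e-6))  # expected: True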


# Reference: https://groups.google.com/forum/#!msg/theano-users/LRmXhTQtKAA/JmdyTzHiAwAJ