Understanding Cross Entropy in TensorFlow

A numpy implementation of the cross entropy ops in TensorFlow.
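
In formula form, with logits x and labels z, tf.nn.sigmoid_cross_entropy_with_logits computes the element-wise binary cross entropy -[z*log(sigmoid(x)) + (1-z)*log(1-sigmoid(x))], while tf.nn.softmax_cross_entropy_with_logits computes one value per row, -sum_j z_j*log(softmax(x)_j). The code below reproduces both with plain numpy and compares the results against the TensorFlow ops.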

 

import tensorflow as tf
import numpy as np
import os
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'

# Make up some test data; the logits and dense labels need to be rank 2

x = np.array([
        [0.,2.,1.],
        [0.,0.,2.]
        ])
label = np.array([
        [0.,1.,0.],
        [0.,0.,1.]
        ])

# Sparse labels: the index of the true class for each row
label2 = np.array([1,2])

# Numpy part #
# Element-wise logistic sigmoid
def sigmoid(logits):
    return 1/(1+np.exp(-logits))

# Row-wise softmax: exponentiate, then normalise each row to sum to 1
def softmax(logits):
    sf = np.exp(logits)
    sf = sf/np.sum(sf, axis=1).reshape(-1,1)
    return sf
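
# Note (not in the original post): the naive softmax above can overflow for
# large positive logits. Subtracting the row maximum before exponentiating
# leaves the result unchanged and avoids this; a minimal sketch:
def softmax_stable(logits):
    shifted = logits - np.max(logits, axis=1, keepdims=True)
    exp = np.exp(shifted)
    return exp / np.sum(exp, axis=1, keepdims=True)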

# Element-wise binary cross entropy on sigmoid probabilities,
# the counterpart of tf.nn.sigmoid_cross_entropy_with_logits
def cross_entropy2(probs, labels):
    return -(labels * np.log(probs) + (1 - labels) * np.log(1 - probs))

# Per-row cross entropy on softmax probabilities,
# the counterpart of tf.nn.softmax_cross_entropy_with_logits
def cross_entropy(softmax, labels):
    return -np.sum(labels * np.log(softmax),axis=1)
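
# Sketch (not in the original post) of a numpy counterpart for
# tf.nn.sparse_softmax_cross_entropy_with_logits: the labels are class indices
# rather than one-hot rows, so just pick the log-probability of the true class
# in each row. The helper name sparse_cross_entropy is my own.
def sparse_cross_entropy(softmax, labels):
    return -np.log(softmax[np.arange(softmax.shape[0]), labels])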

numpy_sig = cross_entropy2( sigmoid(x), label )
numpy_softmax = cross_entropy( softmax(x), label )
print(softmax(x))  # show the softmax probabilities for reference
print("my sigmoid_cross_entropy_with_logits is \n %s \n "%(numpy_sig))
print("my softmax_cross_entropy_with_logits is \n %s \n "%(numpy_softmax))

# Tensorflow part #

g = tf.Graph()
with g.as_default():
    tf_x = tf.constant(x)
    tf_label = tf.constant(label)
    tf_label2 = tf.constant(label2)
    tf_ret = tf.nn.sigmoid_cross_entropy_with_logits(logits= tf_x,labels=tf_label)
    tf_softmax = tf.nn.softmax_cross_entropy_with_logits(logits= tf_x,labels=tf_label)
    tf_softmax_2 = tf.nn.sparse_softmax_cross_entropy_with_logits(logits= tf_x,labels=tf_label2)

with tf.Session(graph=g) as ss:
    r_sig,r_softmax,r_softmax_sparse = ss.run([tf_ret,tf_softmax,tf_softmax_2])

print("tensorflow sigmoid_cross_entropy_with_logits is \n %s \n "%(r_sig))
print("tensorflow softmax_cross_entropy_with_logits is \n %s \n "%(r_softmax))

print("tensorflow sparse_softmax_cross_entropy_with_logits is \n %s \n "%(r_softmax_sparse))

 
