# Demo: automatic differentiation of loss = w^2 at w = 3.0.
# Trainable tf.Variable objects are watched by the tape automatically.
with tf.GradientTape() as grad_tape:
    weight = tf.Variable(tf.constant(3.0))
    loss_value = tf.pow(weight, 2)
# d(w^2)/dw = 2w, evaluated at w = 3.0 -> 6.0
grad = grad_tape.gradient(loss_value, weight)
print(grad)
#tf.Tensor(6.0, shape=(), dtype=float32)
 tf.one_hot(labels_to_encode, depth=num_classes)  # one-hot encode labels into num_classes columns
 tf.nn.softmax( )  # convert logits into a probability distribution summing to 1
 assign_sub  # in-place subtraction on a tf.Variable (e.g. w.assign_sub(lr * grad) for gradient descent)
 tf.argmax  # index of the largest value along the given axis

 

posted on 2020-07-21 17:42  知否知否,  阅读(102)  评论(0)  编辑  收藏  举报