import tensorflow as tf
import numpy as np

# Integer class indices (not one-hot) for sparse_softmax_cross_entropy_with_logits.
labels = tf.convert_to_tensor([1], dtype=tf.int64)
logits = tf.convert_to_tensor([[0, 1, 2]], dtype=tf.float32)
r = tf.nn.sparse_softmax_cross_entropy_with_logits(logits=logits, labels=labels)

# If the labels are one-hot, use softmax_cross_entropy_with_logits instead.

with tf.Session() as sess:
    print('tf calc: ', sess.run(r)[0])

# Reproduce the same loss by hand: softmax over the logits, then the negative
# log of the probability assigned to the true class.
# For logits [0, 1, 2] and true class 1:
# -ln(e^1 / (e^0 + e^1 + e^2)) ~= 1.4076
x = [0, 1, 2]
exp_x = list(map(lambda v: np.exp(v), x))
softmax_x = list(map(lambda v: v / sum(exp_x), exp_x))

print(softmax_x)
one_hot_label = [0, 1, 0]

loss = -np.log(np.dot(softmax_x, one_hot_label))
print('manual calc: ', loss)
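
# A minimal sketch of the one-hot variant mentioned in the comment above,
# assuming the same TF 1.x session API as the rest of the paste:
# softmax_cross_entropy_with_logits takes the labels as a probability
# distribution over classes rather than as integer indices, so it should
# produce the same loss value here.
one_hot_labels = tf.convert_to_tensor([[0, 1, 0]], dtype=tf.float32)
r_one_hot = tf.nn.softmax_cross_entropy_with_logits(logits=logits, labels=one_hot_labels)

with tf.Session() as sess:
    print('tf one-hot calc: ', sess.run(r_one_hot)[0])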