import tensorflow as tf
import numpy as np

# Sparse labels: each entry is a class index, not a one-hot vector.
labels = tf.convert_to_tensor([1], dtype=tf.int64)
logits = tf.convert_to_tensor([[0, 1, 2]], dtype=tf.float32)
r = tf.nn.sparse_softmax_cross_entropy_with_logits(logits=logits, labels=labels)
# If the labels are one-hot, use softmax_cross_entropy_with_logits instead.
with tf.Session() as sess:  # TF1-style graph/session API
    print('tf calc: ', sess.run(r)[0])
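
# A minimal sketch of the one-hot variant mentioned in the comment above,
# assuming the same TF1 session setup and the `logits` tensor defined earlier:
# with one-hot labels, softmax_cross_entropy_with_logits should produce the
# same loss value as the sparse version.
one_hot_labels = tf.convert_to_tensor([[0, 1, 0]], dtype=tf.float32)
r_one_hot = tf.nn.softmax_cross_entropy_with_logits(labels=one_hot_labels, logits=logits)
with tf.Session() as sess:
    print('one-hot tf calc: ', sess.run(r_one_hot)[0])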
# Manual check with NumPy: softmax over the logits, then cross-entropy.
x = [0, 1, 2]
exp_x = [np.exp(v) for v in x]
softmax_x = [v / sum(exp_x) for v in exp_x]
print(softmax_x)
one_hot_label = [0, 1, 0]
# Cross-entropy is -log of the predicted probability of the true class.
loss = -np.log(np.dot(softmax_x, one_hot_label))
print('manual calc: ', loss)
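
# Sanity check: both prints should agree. softmax([0, 1, 2]) ≈
# [0.0900, 0.2447, 0.6652], so for true class index 1 the loss is
# -log(0.2447) ≈ 1.4076.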