Not a member of Pastebin yet?
Sign Up —
it unlocks many cool features!
"""Demonstrate that the autodiff gradient of softmax cross-entropy with
respect to the logits equals the closed form ``softmax(logits) - labels``.

TensorFlow 1.x graph-mode script: builds the graph, then evaluates both
the automatic gradient and the analytic expression so they can be
compared by eye (both prints should show the same vector).
"""
import numpy as np
import tensorflow as tf

# Fixed demo inputs: logits and a one-hot label for class index 1.
in_vec = tf.constant(np.array([1, 2, 3], dtype='float32'))   # logits
one_hot = tf.constant(np.array([0, 1, 0], dtype='float32'))  # label

# d(loss)/d(logits) computed by TF's automatic differentiation.
in_grad = tf.gradients(
    tf.nn.softmax_cross_entropy_with_logits(labels=one_hot, logits=in_vec),
    in_vec,
)[0]

# Context manager ensures the session's resources are released even if a
# run raises — the original created the session and never closed it.
with tf.Session() as sess:
    print(sess.run(in_grad))                          # autodiff gradient
    print(sess.run(tf.nn.softmax(in_vec) - one_hot))  # analytic gradient
Add Comment
Please sign in to add a comment