- """ Q: Can you pass 2 gradients for the same variable to the optimizer? """
- import tensorflow as tf
- v = tf.get_variable(name='v', initializer=tf.constant(1.0), dtype=tf.float32)
- l1 = v * 2
- l2 = v * 4
- optimizer = tf.train.AdamOptimizer()
- g1 = optimizer.compute_gradients(loss=l1, var_list=[v])
- g2 = optimizer.compute_gradients(loss=l2, var_list=[v])
- apply_g1 = optimizer.apply_gradients(g1)
- apply_g2 = optimizer.apply_gradients(g2)
- apply_both = optimizer.apply_gradients(g1 + g2)
- # We see below that applying individual updates is not equivalent to averaging the updates when multiple gradients apply to a single variable
- with tf.Session() as sess:
- sess.run(tf.global_variables_initializer())
- print(sess.run(v))
- sess.run(apply_both)
- print(sess.run(v))
- sess.run(apply_both)
- print(sess.run(v))
- sess.run(tf.global_variables_initializer())
- print(sess.run(v))
- sess.run(apply_g1)
- sess.run(apply_g2)
- print(sess.run(v))
- sess.run(apply_g1)
- sess.run(apply_g2)
- print(sess.run(v))
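
# A minimal sketch of an alternative (not part of the original paste): instead of
# passing duplicate (gradient, variable) pairs to apply_gradients, sum the two
# gradients for v and apply a single combined update, so the optimizer's internal
# state only advances once per step.
g_sum = [(g1[0][0] + g2[0][0], v)]
apply_sum = optimizer.apply_gradients(g_sum)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    print(sess.run(v))
    sess.run(apply_sum)
    print(sess.run(v))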