Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
import time

import tensorflow as tf

# Benchmark dense float matmul throughput on a 1024x1024 matrix.
# v1 <- (v1 @ v2) / n is the measured op. With both matrices all-ones,
# each entry of v1 @ v2 equals n, so dividing by n leaves v1 all-ones:
# the update is a fixed point, and reduce_sum(v1) should stay exactly n**2.
n = 1024
# tf.ones((n, n)) replaces the original tf.ones_initializer((n, n)):
# in TF 1.x ones_initializer is an initializer class and is not callable
# with a shape; tf.ones builds the identical all-ones tensor.
v1 = tf.Variable(tf.ones((n, n)))
v2 = tf.Variable(tf.ones((n, n)))
op = v1.assign(tf.matmul(v1, v2) / n).op
norm = tf.reduce_sum(v1)

sess = tf.Session()
sess.run(tf.initialize_all_variables())
sess.run(op)  # pre-warming: the first run pays one-time graph setup cost
sess.run(tf.initialize_all_variables())  # reset v1 so the timed loop starts clean

num_iters = 10
start = time.time()
for _ in range(num_iters):
    sess.run(op)
elapsed = time.time() - start

# Absolute deviation from the analytic result n**2. (The original used
# nulp_diff from numpy.testing.utils, a private module path that was
# removed from numpy; a plain absolute error conveys the same signal here.)
error = abs(float(sess.run(norm)) - n**2)
# FLOPs per matmul: n**2 outputs, each needing n multiplies and n-1 adds.
total_ops = num_iters * (n**3 + n**2 * (n - 1))
print("%.2f G ops/sec, error %.2f" % (total_ops / elapsed / 10**9, error))
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement