Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
# Build the computation graph for a single-layer RBM trained with
# k-step contrastive divergence (CD-k), TensorFlow 1.x graph style.
# Relies on helpers defined elsewhere in this file: weights(), bias(),
# sample_prob(), and the hyperparameters Nv, Nh, gibbs_sampling_steps, alpha.
g1 = tf.Graph()
with g1.as_default():
    # Visible layer is a flattened 28x28 image; batch size is dynamic.
    # NOTE(review): assumes Nv == 784 so that matmul(X1, w1) is valid — confirm.
    X1 = tf.placeholder("float", [None, 784])
    w1 = weights([Nv, Nh])   # visible-to-hidden weight matrix, shape [Nv, Nh]
    vb1 = bias([Nv])         # visible-unit biases
    hb1 = bias([Nh])         # hidden-unit biases
    print ("w1:", w1.shape)
    print("X1:", X1.shape)
    print("vb1:", vb1.shape)
    print("hb1:",hb1.shape)

    # Positive phase: hidden activations driven by the data.
    h0_prob = tf.nn.sigmoid(tf.matmul(X1, w1) + hb1)
    h0 = sample_prob(h0_prob)
    h1 = h0
    print ("h1:", h1.shape)

    # Gibbs chain: alternate v -> h reconstructions for CD-k.
    for step in range(gibbs_sampling_steps):
        v1_prob = tf.nn.sigmoid(tf.matmul(h1, tf.transpose(w1)) + vb1)
        v1 = sample_prob(v1_prob)
        print("v1:", v1.shape)
        h1_prob = tf.nn.sigmoid(tf.matmul(v1, w1) + hb1)
        h1 = sample_prob(h1_prob)

    # BUG FIX: the CD weight gradients are batch outer products v^T h with
    # shape [Nv, Nh]. The original computed tf.matmul(v1, h0) /
    # tf.matmul(v1, h1), i.e. [batch, Nv] x [batch, Nh], which is
    # shape-incompatible; the positive term must also use the data X1,
    # not the reconstruction v1.
    w1_positive_grad = tf.matmul(tf.transpose(X1), h0)
    w1_negative_grad = tf.matmul(tf.transpose(v1), h1)
    print ("w1_positive: ", w1_positive_grad.shape)
    print ("w1_negative: ", w1_negative_grad.shape)

    # Average the gradient over the (dynamic) batch size, then apply the
    # in-place parameter updates.
    dw1 = (w1_positive_grad - w1_negative_grad) / tf.to_float(tf.shape(X1)[0])
    update_w1 = tf.assign_add(w1, alpha * dw1)
    update_vb1 = tf.assign_add(vb1, alpha * tf.reduce_mean(X1 - v1, 0))
    update_hb1 = tf.assign_add(hb1, alpha * tf.reduce_mean(h0 - h1, 0))
    out1 = (update_w1, update_vb1, update_hb1)
    print ("update_w1: ", update_w1.shape)

    # Final reconstruction through the freshly updated parameters, used
    # only to report a mean-squared reconstruction error.
    v1_prob = tf.nn.sigmoid(tf.matmul(h1, tf.transpose(update_w1)) + update_vb1)
    v1 = sample_prob(v1_prob)
    print ("v1 last:", v1.shape)
    err1 = X1 - v1
    print("error: ", err1.shape)
    err_sum1 = tf.reduce_mean(err1 * err1)
    initialize1 = tf.global_variables_initializer()
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement