• API
• FAQ
• Tools
• Archive
daily pastebin goal
71%
SHARE
TWEET

Untitled

a guest Jan 21st, 2019 76 Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
def LeakyReLu(x, alpha=0.1):
    """Leaky ReLU activation: elementwise max(alpha * x, x)."""
    return tf.maximum(alpha * x, x)
4.
def weight_variable(shape):
    """Return a trainable weight tensor of the given shape,
    initialised from a truncated normal with stddev 0.1."""
    return tf.Variable(tf.truncated_normal(shape, stddev=0.1))
8.
with tf.name_scope('Discriminator') as scope:
    # 1-D conv filters are [filter_width, in_channels, out_channels];
    # channel depth doubles at each layer: 1 -> 16 -> 32 -> 64 -> 128 -> 256.
    DW_conv1 = weight_variable([5, 1, 16])
    Db_conv1 = bias_variable([16])  # bias_variable is defined elsewhere in this file
    DW_conv2 = weight_variable([5, 16, 32])
    Db_conv2 = bias_variable([32])
    DW_conv3 = weight_variable([5, 32, 64])
    Db_conv3 = bias_variable([64])
    DW_conv4 = weight_variable([5, 64, 128])
    Db_conv4 = bias_variable([128])
    DW_conv5 = weight_variable([5, 128, 256])
    Db_conv5 = bias_variable([256])

    # Final dense layer: flattened conv output (5 * 256) -> scalar score.
    DW = weight_variable([5 * 256, 1])
    Db = bias_variable([1])
    # Collected so the optimizer can restrict updates to discriminator weights.
    D_variables = [DW_conv1, Db_conv1, DW_conv2, Db_conv2,
                   DW_conv3, Db_conv3, DW_conv4, Db_conv4,
                   DW_conv5, Db_conv5, DW, Db]

def D(X):
    """Discriminator/critic: five strided 1-D conv layers with LeakyReLU,
    then a dense layer squashed through tanh to a scalar score per sample.

    `conv1d` is a helper defined elsewhere in this file; its third argument
    is presumably the stride — TODO confirm against its definition.
    """
    X = LeakyReLu(conv1d(X, DW_conv1, 2) + Db_conv1)
    X = LeakyReLu(conv1d(X, DW_conv2, 5) + Db_conv2)
    X = LeakyReLu(conv1d(X, DW_conv3, 2) + Db_conv3)
    X = LeakyReLu(conv1d(X, DW_conv4, 5) + Db_conv4)
    X = LeakyReLu(conv1d(X, DW_conv5, 2) + Db_conv5)

    X = tf.reshape(X, [-1, 5 * 256])
    # BUG FIX: original read `X = X = tf.nn.tanh(...)` — a redundant
    # double assignment; a single assignment is the intended statement.
    X = tf.nn.tanh(tf.matmul(X, DW) + Db)
    return X
36.
# Load a pretrained feature-extractor conv layer (W_conv5 / b_conv5) from a
# checkpoint, slice it down, and freeze the result as graph constants.
W_conv5 = tf.Variable(tf.constant(0.1, shape=[5, 128, 256]), name="W_conv5")
b_conv5 = tf.Variable(tf.constant(0.1, shape=[256]), name="b_conv5")
# Restore only these two variables from the extractor checkpoint.
# `sess` and `FLAGS` are defined elsewhere in this file.
saver = tf.train.Saver({'W_conv5': W_conv5, 'b_conv5': b_conv5})
saver.restore(sess, FLAGS.extractor_dir)
# Keep only input-channel 0 / output-channel 0 of the restored filter,
# reshaped to a single [5, 1, 1] conv1d kernel with its matching scalar bias.
W_conv5 = tf.reshape(W_conv5[0:5,0,0], [5,1,1])
b_conv5 = tf.reshape(b_conv5[0], [1])
# Evaluate the restored values and rebind the names to tf.constant so the
# extractor weights are frozen (no longer trainable variables).
W_conv5 = tf.constant(W_conv5.eval(session=sess))
b_conv5 = tf.constant(b_conv5.eval(session=sess))
45.
def C(X):
    """Frozen feature extractor: stride-1, SAME-padded 1-D convolution
    with the restored (constant) W_conv5 kernel, plus bias."""
    features = tf.nn.conv1d(X, W_conv5, stride=1, padding='SAME')
    return features + b_conv5
49.
50.
# Perceptual loss: squared distance between frozen-extractor features of the
# real and generated samples, summed per sample and scaled down.
res = tf.square(C(real_X_shaped) - C(fake_Y_shaped))
# NOTE(review): batch size 11 is hard-coded here — confirm against the
# feed/batching code elsewhere in the file.
res = tf.reshape(res, [11, data_dim])
ploss = tf.reduce_sum(res, 1) * 0.00001

# WGAN-GP critic loss: E[D(fake)] - E[D(real)] + gradient penalty.
# BUG FIX: the original line contained a full-width ideographic space
# (U+3000) after the '-' and was missing the closing paren of the second
# reduce_mean — both SyntaxErrors. grad_pen is added outside the mean.
D_loss = tf.reduce_mean(D(fake_Y_shaped)) - tf.reduce_mean(D(real_X_shaped)) + grad_pen

# Generator loss: perceptual term minus the critic score of the fakes.
G_loss = tf.reduce_mean(ploss - D(fake_Y_shaped))
RAW Paste Data
We use cookies for various purposes including analytics. By continuing to use Pastebin, you agree to our use of cookies as described in the Cookies Policy.

Top