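# Comparing loss curves for the same one-hidden-layer autoencoder trained two
# ways: with low-level TensorFlow 1.x ops and with Keras.
#
# `digits_train` is assumed to be defined elsewhere as a 2-D float array of
# flattened, scaled samples; for example (an assumption, not part of the
# original paste):
#
#     from sklearn.datasets import load_digits
#     digits_train = load_digits().data.astype('float32') / 16.0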
from functools import partial

import matplotlib.pyplot as plt
import tensorflow as tf
from keras.layers import Dense
from keras.models import Sequential
from keras.optimizers import Adam

inputs = digits_train
batch_size = 256

hidden_nodes = 120

learning_rate = 0.01
epochs = 100
# Tensorflow
dataset = tf.data.Dataset.from_tensor_slices(inputs)
# shuffle() takes (buffer_size, seed=...); a buffer as large as the dataset
# gives a full shuffle, and shuffling before batching mixes individual
# samples rather than whole batches
dataset = dataset.shuffle(buffer_size=inputs.shape[0])
dataset = dataset.batch(batch_size)
dataset = dataset.repeat()

dataset_it = dataset.make_one_shot_iterator()
input_layer = dataset_it.get_next()
layer_settings = {
    'activation': tf.nn.sigmoid,
    # instantiate the initializer rather than passing the bare class
    'kernel_initializer': tf.initializers.random_normal(),
}

layer = partial(tf.layers.dense, **layer_settings)
hidden_layer = layer(input_layer, hidden_nodes)
output_layer = layer(hidden_layer, inputs.shape[1])

# reconstruction loss: mean squared error between output and input
loss = tf.reduce_mean(tf.square(output_layer - input_layer))
optimizer = tf.train.AdamOptimizer(learning_rate)
training_op = optimizer.minimize(loss)
losses = []
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())

    # each run() consumes a single batch from the repeated dataset, so one
    # "epoch" here is really one training step (see the per-batch variant at
    # the bottom of the paste)
    for e in range(epochs):
        _, loss_ = sess.run([training_op, loss])
        losses.append(loss_)

plt.plot(losses, label="Tensorflow")

# Keras
hidden_layer = Dense(
    hidden_nodes,
    input_shape=(inputs.shape[1],),
    activation='sigmoid',
    kernel_initializer='RandomNormal',
)

output_layer = Dense(
    inputs.shape[1],
    activation='sigmoid',
    kernel_initializer='RandomNormal',
)

model = Sequential([hidden_layer, output_layer])

adam = Adam(lr=learning_rate)
model.compile(optimizer=adam, loss='mean_squared_error')
losses = []
for e in range(epochs):
    # could be done without the for loop, but I want to do
    # some other stuff here later on
    h = model.fit(inputs,
                  inputs,
                  batch_size=batch_size,
                  initial_epoch=e,
                  epochs=(e + 1),
                  verbose=0)

    # history['loss'] holds one entry per epoch trained in this call (a
    # single one here), so append the scalar instead of the whole list
    losses.append(h.history['loss'][0])

plt.plot(losses, label="Keras")

plt.xlabel("Epoch")
plt.ylabel("MSE")

plt.legend()

# Per-batch variant of the TensorFlow loop: run every batch each epoch, so a
# point on the curve corresponds to a full pass over the data. Assumes it is
# executed inside the tf.Session block above, with `batches` defined, e.g.
# batches = inputs.shape[0] // batch_size
for e in range(epochs):
    for _ in range(batches):
        _, loss_ = sess.run([training_op, loss])

    losses.append(loss_)
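
# When run as a plain script rather than in a notebook, the comparison figure
# still needs an explicit call to be rendered:
plt.show()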