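# NOTE: digits_train is taken as given below. One plausible preparation
# (an assumption, not part of the original paste): sklearn's 8x8 digits,
# flattened to 64 features and scaled from 0..16 down to [0, 1].
from sklearn.datasets import load_digits

digits_train = (load_digits().data / 16.0).astype('float32')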
import tensorflow as tf  # TensorFlow 1.x style API (tf.Session, tf.layers)
import matplotlib.pyplot as plt
from functools import partial
from keras.layers import Dense
from keras.models import Sequential
from keras.optimizers import Adam

inputs = digits_train

batch_size = 256
hidden_nodes = 120
learning_rate = 0.01
epochs = 100
# Tensorflow
dataset = tf.data.Dataset.from_tensor_slices(inputs)
# Shuffle individual examples with a full-size buffer, then batch and
# repeat. (The original shuffled after batching, and shuffle(10, ...)
# passed the dataset size as the seed rather than the buffer size.)
dataset = dataset.shuffle(buffer_size=inputs.shape[0])
dataset = dataset.batch(batch_size)
dataset = dataset.repeat()
dataset_it = dataset.make_one_shot_iterator()
input_layer = dataset_it.get_next()
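# Optional sanity check (a sketch): pull one batch to confirm the pipeline
# yields arrays of shape (batch_size, n_features).
# with tf.Session() as s:
#     print(s.run(input_layer).shape)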
layer_settings = {
    'activation': tf.nn.sigmoid,
    'kernel_initializer': tf.initializers.random_normal,
}
layer = partial(tf.layers.dense, **layer_settings)

hidden_layer = layer(input_layer, hidden_nodes)
output_layer = layer(hidden_layer, inputs.shape[1])  # reconstruct the input

loss = tf.reduce_mean(tf.square(output_layer - input_layer))  # MSE
optimizer = tf.train.AdamOptimizer(learning_rate)
training_op = optimizer.minimize(loss)
batches = inputs.shape[0] // batch_size  # steps per epoch
losses = []
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    for e in range(epochs):
        # run a full pass over the data, keep the last batch's loss
        for _ in range(batches):
            _, loss_ = sess.run([training_op, loss])
        losses.append(loss_)
plt.plot(losses, label="Tensorflow")
# Keras
hidden_layer = Dense(
    hidden_nodes,
    input_shape=(inputs.shape[1],),
    activation='sigmoid',
    kernel_initializer='RandomNormal',
)
output_layer = Dense(
    inputs.shape[1],
    activation='sigmoid',
    kernel_initializer='RandomNormal',
)
model = Sequential([hidden_layer, output_layer])
adam = Adam(lr=learning_rate)
model.compile(optimizer=adam, loss='mean_squared_error')
losses = []
for e in range(epochs):
    # could be done without the for loop, but this leaves room for
    # other per-epoch work later on
    h = model.fit(inputs,
                  inputs,
                  batch_size=batch_size,
                  initial_epoch=e,
                  epochs=(e + 1),
                  verbose=0)
    # each fit call runs one epoch, so history['loss'] has one entry
    losses.append(h.history['loss'][0])
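# For reference, the same training without the manual loop (a sketch;
# equivalent here, but it leaves no hook for per-epoch work):
# h = model.fit(inputs, inputs, batch_size=batch_size, epochs=epochs, verbose=0)
# losses = h.history['loss']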
plt.plot(losses, label="Keras")
plt.xlabel("Epoch")
plt.ylabel("MSE")
plt.legend()
plt.show()