Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- import tensorflow as tf
- import numpy as np
- import random
- from multiprocessing import pool as mpp
- import itertools as it
- import matplotlib.pyplot as plt
def onehotencode(index, n):
    """Return a length-n one-hot list of floats with 1.0 at position `index`.

    If `index` is outside range(n), every entry is 0.0 (no error is raised).
    """
    return [float(position == index) for position in range(n)]
if __name__ == "__main__":
    # Load MNIST and flatten each 28x28 image into a 784-vector.
    (x_train_data, y_train_data), (test_x_data, test_y_data) = tf.keras.datasets.mnist.load_data()
    x_train_data = np.reshape(x_train_data, (-1, 28 * 28))
    test_x_data = np.reshape(test_x_data, (-1, 28 * 28))
    # NOTE(review): pixels are left as raw 0-255 values, as in the original;
    # scaling to [0, 1] (x / 255.0) would likely speed up convergence — confirm.
    print(x_train_data.shape)
    print(test_x_data.shape)

    # One-hot encode the integer labels (10 classes) in parallel.
    pool = mpp.Pool(8)
    y_train_data = np.array(pool.starmap(onehotencode, zip(y_train_data, it.repeat(10))))
    test_y_data = np.array(pool.starmap(onehotencode, zip(test_y_data, it.repeat(10))))
    pool.close()
    pool.join()

    # Small fully-connected classifier: 784 -> 10 -> 10 -> 10 -> 10 (softmax).
    input_layer = tf.keras.layers.Input((784,))
    hidden_layer_1 = tf.keras.layers.Dense(units=10, activation=tf.keras.activations.tanh)(input_layer)
    hidden_layer_2 = tf.keras.layers.Dense(units=10, activation=tf.keras.activations.tanh)(hidden_layer_1)
    hidden_layer_3 = tf.keras.layers.Dense(units=10, activation=tf.keras.activations.tanh)(hidden_layer_2)
    output_layer = tf.keras.layers.Dense(units=10, activation=tf.keras.activations.softmax)(hidden_layer_3)
    model = tf.keras.Model(inputs=input_layer, outputs=output_layer)
    # Fix: categorical cross-entropy is the appropriate loss for a softmax
    # output with one-hot targets; mean squared error trains poorly here.
    model.compile(
        optimizer=tf.keras.optimizers.SGD(0.01),
        loss=tf.keras.losses.categorical_crossentropy,
        metrics=["accuracy"],
    )

    # Interactive plotting: a fixed random sample of test digits is redrawn
    # every other epoch alongside the model's class probabilities.
    plt.ion()
    plt.show()
    sample_count = 15
    sample_indices = random.sample(range(test_x_data.shape[0]), sample_count)
    sample = test_x_data[sample_indices]
    # Grayscale image replicated across 3 channels so imshow renders it as RGB.
    sampleview = np.reshape(sample, (-1, 28, 28, 1))
    sampleview = np.tile(sampleview, (1, 1, 1, 3))
    fig, ax = plt.subplots(sample_count, 2)

    def showplt(epoch, logs):
        """Redraw the sample images and their predicted class bars every 2 epochs."""
        if epoch % 2 == 0:
            classifications = model.predict_on_batch(sample)
            for i in range(sample_count):
                ax[i][0].cla()  # clear the image axes
                ax[i][1].cla()  # clear the bar-chart axes
                ax[i][0].imshow(sampleview[i])
                ax[i][1].bar(range(10), classifications[i])
            plt.draw()
            plt.pause(0.0001)

    pltcallback = tf.keras.callbacks.LambdaCallback(on_epoch_end=showplt)
    # Fix: the callback was built but never passed (callbacks=[]), so the live
    # plot never updated during training. Positional 300/1000 made explicit.
    model.fit(
        x_train_data,
        y_train_data,
        batch_size=300,
        epochs=1000,
        validation_data=(test_x_data, test_y_data),
        callbacks=[pltcallback],
    )
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement