Not a member of Pastebin yet?
Sign Up —
it unlocks many cool features!
"""Train a simple fully-connected MLP classifier on MNIST with Keras.

Loads the MNIST digit dataset, flattens and normalizes the images,
one-hot encodes the labels, trains a 784 -> 680 -> 1230 -> 10 network
with LeakyReLU hidden activations, and reports test accuracy.
"""
import keras
from keras.datasets import mnist
from keras.models import Sequential
from keras.layers import Dense, LeakyReLU

# NOTE(review): the original script also imported Dropout, Conv2D,
# RMSprop, matplotlib's imshow, numpy, and the long-removed
# tensorflow.examples.tutorials input_data helper, and called the
# private K.tensorflow_backend._get_available_gpus() API. None of these
# were used, and the last two fail outright on TensorFlow >= 2.0, so
# they have been dropped.

# ---- Load MNIST ---------------------------------------------------------
# load_data() returns 28x28 uint8 grayscale images with integer labels,
# already split into train and test sets.
(x_train, y_train), (x_test, y_test) = mnist.load_data()

# Flatten each 28x28 image to a 784-dim float vector scaled to [0, 1].
# reshape(-1, 784) infers the sample count, so the same code works for
# both splits (and for any future split size) instead of hard-coding
# 60000 / 10000.
x_train = x_train.reshape(-1, 784).astype('float32') / 255
x_test = x_test.reshape(-1, 784).astype('float32') / 255

# Convert integer class labels to one-hot vectors, as required by the
# categorical cross-entropy loss used below.
num_classes = 10
y_train = keras.utils.to_categorical(y_train, num_classes)
y_test = keras.utils.to_categorical(y_test, num_classes)

print(x_train.shape, 'train samples')
print(x_test.shape, 'test samples')
print(y_train.shape, 'train labels')
print(y_test.shape, 'test labels')

# ---- Model --------------------------------------------------------------
# LeakyReLU is a Layer, not an activation function: passing an instance
# via activation= is unreliable across Keras versions. The documented
# pattern is a linear Dense layer followed by a LeakyReLU layer.
model = Sequential()
model.add(Dense(680, input_shape=(784,)))
model.add(LeakyReLU())
model.add(Dense(1230))
model.add(LeakyReLU())
model.add(Dense(num_classes, activation='softmax'))
#model.summary()
print('modeling complete')

# ---- Training -----------------------------------------------------------
model.compile(loss='categorical_crossentropy',
              optimizer='adam',
              metrics=['accuracy'])
model.fit(x_train, y_train, batch_size=150, epochs=20, verbose=1)
# Announce completion only after fit() has actually run (the original
# printed this before training started).
print('training complete')

# ---- Evaluation ---------------------------------------------------------
# evaluate() returns [loss, accuracy] given the metrics compiled above.
scores = model.evaluate(x_test, y_test)
print('Acc on testing data : ', scores[1])
Add Comment
Please sign in to add a comment.