Untitled

a guest | Mar 25th, 2017
import numpy
from keras.datasets import mnist
from keras.models import Sequential
from keras.layers import Dense, Dropout, Flatten
from keras.layers.convolutional import Convolution2D, MaxPooling2D
from keras.utils import np_utils
from keras import backend as K

# Keras 1.x API (Convolution2D, border_mode, nb_epoch); use channels-first ("th")
# image ordering to match the (1, 28, 28) input shape below
K.set_image_dim_ordering('th')

# fix random seed for reproducibility
seed = 7
numpy.random.seed(seed)

# load data
(X_train, y_train), (X_test, y_test) = mnist.load_data()
# reshape to be [samples][channels][width][height]
X_train = X_train.reshape(X_train.shape[0], 1, 28, 28).astype('float32')
X_test = X_test.reshape(X_test.shape[0], 1, 28, 28).astype('float32')

# normalize inputs from 0-255 to 0-1
X_train = X_train / 255
X_test = X_test / 255
# one hot encode outputs
y_train = np_utils.to_categorical(y_train)
y_test = np_utils.to_categorical(y_test)
num_classes = y_test.shape[1]

def larger_model():
    # create model: two conv/pool blocks, dropout, then three dense layers
    model = Sequential()
    model.add(Convolution2D(30, 5, 5, border_mode='valid', input_shape=(1, 28, 28), activation='relu'))
    model.add(MaxPooling2D(pool_size=(2, 2)))
    model.add(Convolution2D(15, 3, 3, activation='relu'))
    model.add(MaxPooling2D(pool_size=(2, 2)))
    model.add(Dropout(0.2))
    model.add(Flatten())
    model.add(Dense(128, activation='relu'))
    model.add(Dense(50, activation='relu'))
    model.add(Dense(num_classes, activation='softmax'))
    # Compile model
    model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
    return model

# build the model
model = larger_model()
# Fit the model
model.fit(X_train, y_train, validation_data=(X_test, y_test), nb_epoch=10, batch_size=200, verbose=2)
# Final evaluation of the model
scores = model.evaluate(X_test, y_test, verbose=0)
print("Baseline Error: %.2f%%" % (100 - scores[1] * 100))
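A quick sanity check, not part of the original paste, is to predict a few test digits and compare them against the labels. The sketch below assumes the model, X_test, and y_test defined above are still in scope and uses the same Keras 1.x API.

# Optional sanity check (assumption: run right after training above)
predictions = model.predict(X_test[:5], verbose=0)
predicted_digits = numpy.argmax(predictions, axis=1)   # most probable class per sample
true_digits = numpy.argmax(y_test[:5], axis=1)         # decode the one-hot labels
print("Predicted:", predicted_digits)
print("Actual:   ", true_digits)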