Advertisement
Guest User

Untitled

a guest
Jun 24th, 2019
393
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
text 2.69 KB | None | 0 0
import numpy as np
import tensorflow as tf
from tensorflow.keras.callbacks import EarlyStopping, ModelCheckpoint
from tensorflow.keras.layers import BatchNormalization, Dense, Dropout
from tensorflow.keras.models import Sequential

from hyperopt import STATUS_OK, Trials, fmin, hp, space_eval, tpe
  6.  
  7. mnist = tf.keras.datasets.mnist
  8. (x_train, y_train), (x_test, y_test) = mnist.load_data()
  9.  
  10. x_train = np.reshape(x_train, (x_train.shape[0], 784)) / 255.0
  11. x_test = np.reshape(x_test, (x_test.shape[0], 784)) / 255.0
  12.  
  13. y_train = tf.keras.utils.to_categorical(y_train, 10)
  14. y_test = tf.keras.utils.to_categorical(y_test, 10)
  15.  
  16. space = {
  17. 'dense_units': hp.choice('Dense Units', [512, 256, 128, 64, 32]),
  18. 'dropout_p': hp.choice('Dropout Percentage', np.arange(0., 1., .1)),
  19. 'activations': hp.choice('Activations', ['relu', 'sigmoid']),
  20. 'kernel_init': hp.choice('Kernal Init', ['glorot_uniform', 'glorot_normal',
  21. 'he_normal', 'he_uniform']),
  22. 'optimizers': hp.choice('Optimizers', ['Adam', 'RMSprop', 'SGD']),
  23. 'batch_size': hp.choice('Batch Size', [16, 32, 64, 128, 256])
  24. }
  25.  
  26. def objective(params, epochs=100, verbose=1):
  27. # architecture
  28. model = Sequential([
  29. # layer1
  30. Dense(params['dense_units'], activation= params['activations'],
  31. input_shape=(784,),
  32. kernel_initializer= params['kernel_init']),
  33. BatchNormalization(),
  34. Dropout(params['dropout_p']),
  35. # layer2
  36. Dense(params['dense_units'], activation= params['activations'],
  37. kernel_initializer= params['kernel_init']),
  38. BatchNormalization(),
  39. Dropout(params['dropout_p']),
  40. # layer3
  41. Dense(params['dense_units'], activation= params['activations'],
  42. kernel_initializer= params['kernel_init']),
  43. BatchNormalization(),
  44. Dropout(params['dropout_p']),
  45. #output
  46. Dense(10, activation='softmax')])
  47.  
  48. # model compilation
  49. model.compile(loss='categorical_crossentropy', metrics=['accuracy'],
  50. optimizer=params['optimizers'])
  51. # callbacks
  52. e = EarlyStopping(monitor='val_loss', patience=10, mode='min', verbose=verbose)
  53. m = ModelCheckpoint('best_weights.hdf5', monitor='val_loss', save_best_only=True,
  54. mode='min', verbose=verbose)
  55. # fitting the model
  56. result = model.fit(x_train, y_train, batch_size=params['batch_size'], epochs=epochs,
  57. verbose=verbose, validation_split=0.2, callbacks=[e, m])
  58. # loss
  59. val_loss = np.amin(result.history['val_loss'])
  60. return {'loss': val_loss, 'status': STATUS_OK, 'model': model}
  61.  
  62. result = fmin(objective, space, algo=tpe.suggest, trials=Trials(), max_evals=5)
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement