############################### Training setup ##################################

from __future__ import print_function

import numpy as np

from sklearn.model_selection import KFold
from keras.models import Sequential
from keras.layers import Dense, Lambda
from keras.callbacks import ReduceLROnPlateau, EarlyStopping, CSVLogger

# Define 10 folds:
seed = 7
np.random.seed(seed)  # seed numpy's RNG so runs are repeatable
# Note: random_state only takes effect when shuffle=True; with shuffle=False the
# folds are consecutive slices of the data, so the split itself is deterministic.
kfold = KFold(n_splits=10, shuffle=False, random_state=None)
print("Splits")

cvscores_acc = []
cvscores_loss = []
hist = []
i = 0

#train_set_data_vstacked_normalized_reshaped = np.reshape(train_set_data_vstacked_normalized, train_set_data_vstacked_normalized.shape + (1,))
#train_set_output_vstacked_normalized_reshaped = np.reshape(train_set_output_vstacked_normalized, train_set_output_vstacked_normalized.shape + (1,))

for train, test in kfold.split(train_set_data_vstacked_normalized):
    print("Model definition!")
    model = Sequential()

    #act = PReLU(init='normal', weights=None)
    #model.add(Dense(output_dim=400, input_dim=400, init="normal", activation=K.tanh))
    #act1 = PReLU(init='normal', weights=None)
    #act2 = PReLU(init='normal', weights=None)

    # Two fully connected layers, 2050 -> 2050 -> 13, both with tanh activations.
    model.add(Dense(output_dim=2050, input_dim=2050, init="normal", activation='tanh'))
    model.add(Dense(output_dim=13, input_dim=2050, init="normal", activation='tanh'))

    # numpy_unorm is assumed to be defined elsewhere in this script. Keep in mind
    # that a Lambda layer receives backend tensors, so the function must be
    # expressed in backend ops rather than plain numpy calls.
    model.add(Lambda(lambda x: numpy_unorm(x)))

    #model.add(ELU(100))
    #model.add(Convolution1D(13, 3, border_mode='same', input_shape=(2050, 1)))

    print("Compiling")
    #rms_opt = keras.optimizers.RMSprop(lr=0.01, rho=0.9, epsilon=1e-08, decay=0.0)
    model.compile(loss='mean_squared_error', optimizer="RMSprop")
    print("Compile done!")
    print('\n')
    print("Train start")

    reduce_lr = ReduceLROnPlateau(monitor='val_loss', factor=0.01, patience=3, verbose=1,
                                  mode='auto', epsilon=0.0001, cooldown=0, min_lr=1e-08)
    stop = EarlyStopping(monitor='val_loss', min_delta=0, patience=5, verbose=1, mode='auto')
    csv_logger = CSVLogger('training_' + str(i) + '.csv')

    # validation_split is ignored by Keras whenever validation_data is supplied,
    # so the held-out fold alone is used for validation here.
    hist_current = model.fit(train_set_data_vstacked_normalized[train],
                             train_set_output_vstacked[train],
                             shuffle=False,
                             validation_data=(train_set_data_vstacked_normalized[test],
                                              train_set_output_vstacked[test]),
                             nb_epoch=150,
                             verbose=1,
                             callbacks=[reduce_lr, csv_logger, stop])
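    # --- Added sketch, not in the original paste ---------------------------------
    # cvscores_loss (and cvscores_acc) are declared above but never filled in.
    # One way to populate cvscores_loss, assuming the held-out fold should be
    # scored with the compiled MSE loss, is to evaluate on the test indices after
    # training; cvscores_acc would additionally need e.g. metrics=['accuracy']
    # passed to model.compile.
    fold_loss = model.evaluate(train_set_data_vstacked_normalized[test],
                               train_set_output_vstacked[test],
                               verbose=0)
    cvscores_loss.append(fold_loss)
    print("Fold %d validation loss: %.4f" % (i, fold_loss))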
    hist.append(hist_current)

    print()
    model.summary()  # summary() prints the architecture itself and returns None

    print("Model stored")
    model.save("Model" + str(i) + ".h5")
    model.save_weights("Model" + str(i) + "_weights.h5")
    del model

    # serialize model to YAML
    #model_yaml = model.to_yaml()
    #with open("model.yaml", "w") as yaml_file:
    #    yaml_file.write(model_yaml)
    # serialize weights to HDF5
    #model.save_weights("Model" + str(i) + ".h5")
    #print("Saved model to disk")

    print("New Model:")
    i += 1

#print("%.2f%% (+/- %.2f%%)" % (np.mean(cvscores_acc), np.std(cvscores_acc)))
#print("%.2f%% (+/- %.2f%%)" % (np.mean(cvscores_loss), np.std(cvscores_loss)))
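
# --- Added sketch, not in the original paste -------------------------------------
# The History objects collected in `hist` are never used above. Since each fit was
# given validation_data, every history records a 'val_loss' series, so the best
# validation loss per fold can be summarised like this:
best_val_losses = [min(h.history['val_loss']) for h in hist]
print("best val_loss per fold: %s" % best_val_losses)
print("mean %.4f (+/- %.4f) over %d folds"
      % (np.mean(best_val_losses), np.std(best_val_losses), len(best_val_losses)))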