SHARE
TWEET

Keras Model Gradient Descent

Iceflame007 Jul 3rd, 2016 595 Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
  1. def get_optimizer(name=None):
  2.     if name is not None:
  3.         if name.lower() == 'sgd':
  4.             return keras.optimizers.SGD(lr=0.01, decay=0.1, momentum=0.1, nesterov=False)
  5.     # Use RMSProp by default
  6.     return keras.optimizers.RMSprop(lr=0.01, rho=0.9, epsilon=1e-08)
  7.  
# NOTE: Python 2 syntax (print statement in the except clause) and the
# Keras 1.x API (output_dim/init/nb_epoch keyword names).
# Trains a single-layer Keras regression model on features loaded from
# feature_file, then evaluates it.
try:
    # extract_input_output is defined elsewhere in the project; presumably
    # n = sample count, d = feature dimension, x = inputs, y = targets,
    # q_elist_map/f_names = auxiliary metadata -- confirm against its definition.
    n, d, x, y, q_elist_map, f_names = extract_input_output(feature_file)
    # Create the model
    num_epochs = 1000
    weights_init = 'uniform'      # Keras 1.x initializer name
    activation = 'sigmoid'
    optimizer_name = 'RMSProp'    # any value other than 'sgd' selects RMSprop
    #optimizer_name = 'SGD'

    np.random.seed(1)    # For reproducibility of results
    # Single dense layer mapping d input features to 1 sigmoid output.
    model = Sequential()
    model.add(Dense(input_dim=d, output_dim=1, init=weights_init, activation=activation))
    model.compile(loss='mse', optimizer=get_optimizer(optimizer_name))

    # Train the model; the last 10% of (x, y) is held out for validation.
    history = model.fit(x, y, nb_epoch=num_epochs, batch_size=20, verbose=0, validation_split=0.1)
    # Evaluate the model (print Evaluation score)
    # NOTE(review): evaluated on the full training set, so this is an
    # optimistic in-sample estimate of performance.
    score = model.evaluate(x, y)

except Exception as e:
    # Top-level boundary: report the failure and dump the traceback
    # rather than crashing the surrounding program.
    print 'Exception caught!'
    traceback.print_exc()
RAW Paste Data
We use cookies for various purposes including analytics. By continuing to use Pastebin, you agree to our use of cookies as described in the Cookies Policy. OK, I Understand
Top