Not a member of Pastebin yet? Sign up — it unlocks many cool features!
def get_optimizer(name=None):
    """Return a Keras optimizer chosen by *name*.

    'sgd' (case-insensitive) selects SGD; any other value, or no name at
    all, falls back to RMSprop with the defaults used throughout this file.
    """
    if name is not None and name.lower() == 'sgd':
        return keras.optimizers.SGD(lr=0.01, decay=0.1, momentum=0.1, nesterov=False)
    # RMSprop is the default optimizer for this experiment.
    return keras.optimizers.RMSprop(lr=0.01, rho=0.9, epsilon=1e-08)
- try:
- n, d, x, y, q_elist_map, f_names = extract_input_output(feature_file)
- # Create the model
- num_epochs = 1000
- weights_init = 'uniform'
- activation = 'sigmoid'
- optimizer_name = 'RMSProp'
- #optimizer_name = 'SGD'
- np.random.seed(1) # For reproducibility of results
- model = Sequential()
- model.add(Dense(input_dim=d, output_dim=1, init=weights_init, activation=activation))
- model.compile(loss='mse', optimizer=get_optimizer(optimizer_name))
- # Train the model
- history = model.fit(x, y, nb_epoch=num_epochs, batch_size=20, verbose=0, validation_split=0.1)
- # Evaluate the model (print Evaluation score)
- score = model.evaluate(x, y)
- except Exception as e:
- print 'Exception caught!'
- traceback.print_exc()
Add Comment
Please sign in to add a comment.