Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- ### Answer 3
from keras.models import Sequential
from keras.layers import Dense
from keras.wrappers.scikit_learn import KerasClassifier
from keras.wrappers.scikit_learn import KerasRegressor
from sklearn.model_selection import GridSearchCV
# BUG FIX: was `import numpy`, but the code below uses the `np` alias,
# which raised NameError. Import under the conventional alias instead.
import numpy as np

# Stack the per-sample arrays (defined earlier in the notebook/script)
# into single 2-D training matrices for the grid search.
# NOTE(review): assumes top_train/bot_train are sequences of equal-shape
# 1-D arrays — confirm against the cell that builds them.
tr_x = np.stack(top_train)
tr_y = np.stack(bot_train)
def create_model(input_dim=1537, hidden_units=100, output_units=1536):
    """Build and compile the regression MLP used by the grid search.

    Args:
        input_dim: Number of input features (default matches tr_x's width).
        hidden_units: Width of the single ReLU hidden layer.
        output_units: Width of the linear output layer (regression targets).

    Returns:
        A compiled ``keras.models.Sequential`` model.
    """
    model = Sequential()
    # he_normal initialization pairs well with ReLU activations.
    model.add(Dense(units=hidden_units, activation='relu',
                    kernel_initializer='he_normal', input_dim=input_dim))
    # Linear output: this is a regression head, not a classifier.
    model.add(Dense(units=output_units, activation='linear',
                    kernel_initializer='he_normal'))
    # FIX: dropped metrics=['accuracy'] — accuracy is meaningless for
    # continuous regression targets and only added confusing output.
    # TODO: May need to adjust learning rate parameter used by adam in cv
    model.compile(loss='mean_squared_error', optimizer='adam')
    return model
# Wrap the Keras builder so scikit-learn's GridSearchCV can drive it.
model = KerasRegressor(build_fn=create_model, verbose=0)

# Hyperparameter grid. Wider ranges were tried earlier and are kept here
# for reference:
#   batch_size = [5, 10, 15, 20, 40, 60, 80, 100]
#   epochs = [10, 50, 100, 150]
param_grid = {
    'batch_size': [5, 10],
    'epochs': [10, 20],
}

# 2-fold CV on negated MSE (sklearn maximizes scores), two parallel jobs.
grid = GridSearchCV(
    estimator=model,
    param_grid=param_grid,
    scoring='neg_mean_squared_error',
    cv=2,
    n_jobs=2,
)
grid_result = grid.fit(tr_x, tr_y)

# summarize results
print("Best: %f using %s" % (grid_result.best_score_, grid_result.best_params_))
cv_results = grid_result.cv_results_
rows = zip(cv_results['mean_test_score'],
           cv_results['std_test_score'],
           cv_results['params'])
for score_mean, score_std, cfg in rows:
    print("%f (%f) with: %r" % (score_mean, score_std, cfg))
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement