Advertisement
Guest User

Untitled

a guest
Jan 26th, 2020
110
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
text 1.51 KB | None | 0 0
### Answer 3
from keras.models import Sequential
from keras.layers import Dense
from keras.wrappers.scikit_learn import KerasClassifier
from keras.wrappers.scikit_learn import KerasRegressor

from sklearn.model_selection import GridSearchCV

import numpy
import numpy as np
# Stack the per-sample rows into 2-D training arrays (inputs / targets).
# NOTE(review): top_train and bot_train are defined elsewhere in the file —
# presumably sequences of equal-length 1-D arrays; confirm against the
# producing code.
tr_x = np.stack(top_train)
tr_y = np.stack(bot_train)
  13. def create_model():
  14. model = Sequential()
  15.  
  16. model.add(Dense(units = 100, activation = 'relu', kernel_initializer = 'he_normal', input_dim = 1537))
  17.  
  18. model.add(Dense(units = 1536, activation = 'linear', kernel_initializer = 'he_normal'))
  19. # Describe the loss and how it is optimized
  20. # TODO: May need to adjust learning rate parameter used by adam in cv
  21. model.compile(loss = 'mean_squared_error', optimizer = 'adam', metrics = ['accuracy'])
  22. return model
  23.  
  24. model = KerasRegressor(build_fn=create_model, verbose=0)
  25. #batch_size = [5, 10, 15, 20, 40, 60, 80, 100]
  26. #epochs = [10, 50, 100, 150]
  27. batch_size = [5, 10]
  28. epochs = [10, 20]
  29. param_grid = dict(batch_size=batch_size, epochs=epochs)
  30.  
  31. grid = GridSearchCV(estimator=model, param_grid=param_grid, scoring='neg_mean_squared_error', cv=2, n_jobs=2)
  32. grid_result = grid.fit(tr_x, tr_y)
  33.  
  34. # summarize results
  35. print("Best: %f using %s" % (grid_result.best_score_, grid_result.best_params_))
  36. means = grid_result.cv_results_['mean_test_score']
  37. stds = grid_result.cv_results_['std_test_score']
  38. params = grid_result.cv_results_['params']
  39. for mean, stdev, param in zip(means, stds, params):
  40. print("%f (%f) with: %r" % (mean, stdev, param))
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement