from collections import OrderedDict

import numpy as np

from utils.experiment_utils import setup_experiments

# when running an experiment, override the parameter you're interested in
base_experiment = OrderedDict([
    ('id', 'exp'),
    ('epoch', 10000),                                # number of epochs
    ('nr_points', 1000),                             # number of test examples
    ('sensors', np.array([0, 0, 10, 0])),            # sensor locations [s1_x, s1_y, s2_x, s2_y, ...]
    ('sigma', 1),                                    # sigma for Gaussian noise on the output vector
    ('grid_width', 10),                              # grid width
    ('grid_height', 10),                             # grid height
    ('nh_1', 4),                                     # number of units in the first hidden layer
    ('loss_function', 'categorical_crossentropy'),   # loss function
    ('hidden_activation', 'tanh'),                   # activation for hidden layers
    ('output_activation', 'sigmoid'),                # final activation
    ('optimizer', 'adam')                            # optimizer
])


# which experiments do we want to run
exp_settings = [
    {'param': 'nh_1', 'values': np.arange(1, 17)},                          # number of hidden units
    {'param': 'optimizer', 'values': ['sgd', 'adam']},                      # optimization algorithms
    {'param': 'sigma', 'values': [0, 0.5, 1, 2]},                           # noise sigma
    {'param': 'hidden_activation', 'values': ['tanh', 'relu']},             # activation for hidden layer
    {'param': 'output_activation', 'values': ['sigmoid', 'softmax']},       # activation for output layer
    {'param': 'nr_points', 'values': [100, 500, 1000, 2500, 5000, 10000]}   # number of training examples
]

experiments = setup_experiments(exp_settings, base_experiment)
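
For context, a minimal sketch of what setup_experiments might do, assuming it expands each entry of exp_settings into one experiment per value by copying base_experiment and overriding that single parameter; the actual utils.experiment_utils implementation may differ.

# Sketch only: setup_experiments_sketch is a hypothetical stand-in for
# utils.experiment_utils.setup_experiments, under the assumption that each
# swept parameter yields one experiment config per value.
from copy import deepcopy

def setup_experiments_sketch(exp_settings, base_experiment):
    experiments = []
    for setting in exp_settings:
        for value in setting['values']:
            exp = deepcopy(base_experiment)          # leave the base config untouched
            exp[setting['param']] = value            # override the swept parameter
            exp['id'] = '{}_{}_{}'.format(base_experiment['id'], setting['param'], value)
            experiments.append(exp)
    return experiments

Each resulting OrderedDict would then be passed to the training loop in turn.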