// ########## My way ##########
MultiLayerSpace hyperparameterSpace = new MultiLayerSpace.Builder()
        // Fixed training options for every candidate model
        .seed(140)
        .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT) //?
        .iterations(1)
        .weightInit(WeightInit.XAVIER) //?
        .learningRate(0.0001)
        .updater(Updater.NESTEROVS).momentum(0.9)
        // Recurrent (GravesLSTM) layer: 4 input features; layer size is the hyperparameter to search
        .addLayer(new GravesLSTMLayerSpace.Builder()
                .activation("tanh")
                .nIn(4)
                .nOut(layerSizeHyperparam)
                .build())
        // RNN output layer for regression: single output with MSE loss
        .addLayer(new RnnOutputLayerSpace.Builder()
                .activation("identity")
                .nIn(layerSizeHyperparam)
                .nOut(1)
                .lossFunction(LossFunctions.LossFunction.MSE)
                .build())
        .build();

CandidateGenerator<DL4JConfiguration> candidateGenerator = new RandomSearchGenerator<>(hyperparameterSpace);

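// Note: both snippets use learningRateHyperparam and layerSizeHyperparam without defining them.
// A minimal sketch of how they are typically declared with Arbiter's parameter spaces follows;
// the class names (ParameterSpace, ContinuousParameterSpace, IntegerParameterSpace) and the
// value ranges are assumptions based on the 0.7.x-era Arbiter API, not part of the original paste.
ParameterSpace<Double> learningRateHyperparam = new ContinuousParameterSpace(0.0001, 0.1);  // continuous range for the learning rate
ParameterSpace<Integer> layerSizeHyperparam = new IntegerParameterSpace(16, 256);           // integer range for the hidden layer size
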
// ########## BasicHyperparameterOptimizationExample ##########
MultiLayerSpace hyperparameterSpace = new MultiLayerSpace.Builder()
        //These next few options: fixed values for all models
        .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
        .iterations(1)
        .regularization(true)
        .l2(0.0001)
        //Learning rate: this is something we want to test different values for
        .learningRate(learningRateHyperparam)
        .addLayer(new DenseLayerSpace.Builder()
                //Fixed values for this layer:
                .nIn(784) //Fixed input: 28x28=784 pixels for MNIST
                .activation("relu")
                //One hyperparameter to infer: layer size
                .nOut(layerSizeHyperparam)
                .build())
        .addLayer(new OutputLayerSpace.Builder()
                //nIn: set to the same hyperparameter as the nOut of the previous layer
                .nIn(layerSizeHyperparam)
                //The remaining hyperparameters: fixed for the output layer
                .nOut(10)
                .activation("softmax")
                .lossFunction(LossFunctions.LossFunction.MCXENT)
                .build())
        .pretrain(false).backprop(true).build();


//Now: We need to define a few configuration options
// (a) How are we going to generate candidates? (random search or grid search)
CandidateGenerator<DL4JConfiguration> candidateGenerator = new RandomSearchGenerator<>(hyperparameterSpace);
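
// The comment above mentions grid search as the alternative to random search. A sketch of that
// option using Arbiter's GridSearchCandidateGenerator is shown below; the constructor arguments
// (discretization count and ordering mode) are assumed from the same-era Arbiter API and the
// value 5 is illustrative only.
CandidateGenerator<DL4JConfiguration> gridCandidateGenerator =
        new GridSearchCandidateGenerator<>(hyperparameterSpace, 5,
                GridSearchCandidateGenerator.Mode.RandomOrder);  // discretize each continuous range into 5 values, visit candidates in random order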