Advertisement
Not a member of Pastebin yet?
Sign Up — it unlocks many cool features!
- ######My way###########
- // Hyperparameter search space for a single-LSTM regression network (DL4J Arbiter).
- // Fixed settings apply to every candidate; layerSizeHyperparam is the tuned
- // hyperparameter (declared outside this snippet — verify against caller).
- MultiLayerSpace hyperparameterSpace = new MultiLayerSpace.Builder()
- .seed(140)
- .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT) //?
- .iterations(1)
- .weightInit(WeightInit.XAVIER) //?
- .learningRate(0.0001)
- .updater(Updater.NESTEROVS).momentum(0.9)
- .addLayer(new GravesLSTMLayerSpace.Builder()
- .activation("tanh")
- .nIn(4) // 4 input features per time step
- .nOut(layerSizeHyperparam) // layer size is the hyperparameter being searched
- .build())
- .addLayer(new RnnOutputLayerSpace.Builder()
- .activation("identity") // linear output, i.e. regression rather than classification
- .nIn(layerSizeHyperparam) // must match nOut of the LSTM layer above
- .nOut(1) // single regression target
- .lossFunction(LossFunctions.LossFunction.MSE) // mean squared error for regression
- .build())
- .build();
- // Random search: each candidate is drawn independently from the space above.
- CandidateGenerator<DL4JConfiguration> candidateGenerator = new RandomSearchGenerator<>(hyperparameterSpace);
- ###########BasicHyperparameterOptimizationExample###########
- // Hyperparameter search space for a simple MNIST MLP (DL4J Arbiter example).
- // Two hyperparameters are searched: learningRateHyperparam and
- // layerSizeHyperparam (both declared outside this snippet).
- MultiLayerSpace hyperparameterSpace = new MultiLayerSpace.Builder()
- //These next few options: fixed values for all models
- .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
- .iterations(1)
- .regularization(true)
- .l2(0.0001)
- //Learning rate: this is something we want to test different values for
- .learningRate(learningRateHyperparam)
- .addLayer( new DenseLayerSpace.Builder()
- //Fixed values for this layer:
- .nIn(784) //Fixed input: 28x28=784 pixels for MNIST
- .activation("relu")
- //One hyperparameter to infer: layer size
- .nOut(layerSizeHyperparam)
- .build())
- .addLayer( new OutputLayerSpace.Builder()
- //nIn: set the same hyperparameter as the nOut for the last layer.
- .nIn(layerSizeHyperparam)
- //The remaining hyperparameters: fixed for the output layer
- .nOut(10) // 10 output classes: digits 0-9
- .activation("softmax")
- .lossFunction(LossFunctions.LossFunction.MCXENT) // multi-class cross-entropy
- .build())
- .pretrain(false).backprop(true).build();
- //Now: We need to define a few configuration options
- // (a) How are we going to generate candidates? (random search or grid search)
- CandidateGenerator<DL4JConfiguration> candidateGenerator = new RandomSearchGenerator<>(hyperparameterSpace);
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement