import java.util.Random;

import org.deeplearning4j.eval.RegressionEvaluation;
import org.deeplearning4j.nn.api.Layer;
import org.deeplearning4j.nn.api.OptimizationAlgorithm;
import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration.ListBuilder;
import org.deeplearning4j.nn.conf.distribution.UniformDistribution;
import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.deeplearning4j.nn.weights.WeightInit;
import org.deeplearning4j.optimize.listeners.ScoreIterationListener;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.dataset.DataSet;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.lossfunctions.LossFunctions;
import org.nd4j.linalg.ops.transforms.Transforms;
/**
 * This basic example shows how to manually create a DataSet and train a
 * basic network on it.
 * <p>
 * The network consists of 2 inputs feeding a first dense layer of
 * LAYER_WIDTH neurons, HIDDEN_LAYERS additional hidden layers of the same
 * width, and a single output neuron that regresses the sum c = a + b of
 * the two inputs. (Despite the class name, the generated targets are
 * sums, not products.)
 * <p>
 * Written against the pre-1.0 DL4J API (iterations(), learningRate() and
 * pretrain()/backprop() on the builders).
 *
 * @author Peter Großmann
 */
public class FFMultiply
{
    private static final int SAMPLES = 500;
    private static final int LAYER_WIDTH = 2;
    private static final int HIDDEN_LAYERS = 0;
    private static final Random r = new Random(9385);

    public static void main(String[] args)
    {
        // one row per sample: two inputs (a, b) and the target c = a + b
        INDArray inputData = Nd4j.zeros(SAMPLES, 2);
        INDArray outputData = Nd4j.zeros(SAMPLES, 1);
        for (int i = 0; i < SAMPLES; i++)
        {
            double a = r.nextDouble() * 0.5;
            double b = r.nextDouble() * 0.5;
            double c = a + b;
            inputData.putScalar(new int[] { i, 0 }, a);
            inputData.putScalar(new int[] { i, 1 }, b);
            outputData.putScalar(new int[] { i, 0 }, c);
        }
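        // Note: the inputs are drawn from [0, 0.5), so every target stays
        // in [0, 1); the raw DataSet can be used without extra normalisation.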
        DataSet ds = new DataSet(inputData, outputData);

        NeuralNetConfiguration.Builder builder = new NeuralNetConfiguration.Builder();
        builder.iterations(100);
        builder.learningRate(0.01);
        builder.seed(123);
        builder.useDropConnect(false);
        builder.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT);
        builder.biasInit(0);
        // miniBatch(true) scales the gradient by the number of examples in
        // the batch; with the whole DataSet fitted at once this simply
        // normalises the update
        builder.miniBatch(true);

        ListBuilder listBuilder = builder.list();
        // first dense layer: 2 inputs -> LAYER_WIDTH neurons
        listBuilder.layer(0, new DenseLayer.Builder().nIn(2).nOut(LAYER_WIDTH)
                .activation(Activation.IDENTITY).weightInit(WeightInit.DISTRIBUTION)
                .dist(new UniformDistribution(0, 1)).build());

        // optional extra hidden layers, LAYER_WIDTH -> LAYER_WIDTH
        DenseLayer.Builder hiddenLayerBuilder = new DenseLayer.Builder().nIn(LAYER_WIDTH)
                .nOut(LAYER_WIDTH).activation(Activation.IDENTITY)
                .weightInit(WeightInit.DISTRIBUTION).dist(new UniformDistribution(0, 1));
        for (int i = 0; i < HIDDEN_LAYERS; i++)
        {
            listBuilder.layer(i + 1, hiddenLayerBuilder.build());
        }

        // regression output: a single neuron with identity activation and
        // mean squared error (negative log likelihood is a classification
        // loss and does not fit a single linear output)
        OutputLayer.Builder outputLayerBuilder = new OutputLayer.Builder(LossFunctions.LossFunction.MSE);
        outputLayerBuilder.nIn(LAYER_WIDTH);
        outputLayerBuilder.nOut(1);
        outputLayerBuilder.activation(Activation.IDENTITY);
        outputLayerBuilder.weightInit(WeightInit.DISTRIBUTION);
        outputLayerBuilder.dist(new UniformDistribution(0, 1));
        listBuilder.layer(HIDDEN_LAYERS + 1, outputLayerBuilder.build());
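        // With IDENTITY activations throughout, the whole network is a
        // single affine map, so it can represent c = a + b exactly (e.g.
        // output weights [1, 1] and zero bias); training only has to find
        // that point.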
        // no unsupervised layer-wise pretraining; per agibsonccc, you
        // typically only use pretrain(true) when you want to pretrain and
        // fine-tune without changing the previously fine-tuned weights,
        // which is for autoencoders and RBMs
        listBuilder.pretrain(false);
        // train with standard backpropagation
        listBuilder.backprop(true);
        // build and init the network; this also checks that everything is
        // configured correctly
        MultiLayerConfiguration conf = listBuilder.build();
        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init();

        // add a listener which prints the score after every iteration
        net.setListeners(new ScoreIterationListener(1));

        // print the number of parameters in the network (and in each
        // layer); copied from GravesLSTMCharModellingExample
        Layer[] layers = net.getLayers();
        int totalNumParams = 0;
        for (int i = 0; i < layers.length; i++)
        {
            int nParams = layers[i].numParams();
            System.out.println("Number of parameters in layer " + i + ": " + nParams);
            totalNumParams += nParams;
        }
        System.out.println("Total number of network parameters: " + totalNumParams);
        // here the actual learning takes place
        net.fit(ds);

        // create output for every training sample and print it next to the
        // expected targets
        INDArray output = net.output(ds.getFeatureMatrix());
        System.out.println(output);
        System.out.println(outputData);
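        // Quick sanity check (a sketch added to the original paste, using
        // Transforms.abs from the nd4j imports above): the mean absolute
        // error between predictions and targets should be near zero after
        // training.
        INDArray absError = Transforms.abs(output.sub(outputData));
        System.out.println("Mean absolute error: " + absError.meanNumber());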
        // Evaluation counts how often the right output neuron had the
        // highest value; it is meant for classification (one neuron per
        // class) and does not apply to this single-output regression:
        // Evaluation eval = new Evaluation(2);
        // eval.eval(ds.getLabels(), output);
        // System.out.println(eval.stats());
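        // The regression counterpart is RegressionEvaluation (a minimal
        // sketch, assuming this DL4J version ships
        // org.deeplearning4j.eval.RegressionEvaluation, added to the
        // imports above); it reports MSE, MAE etc. per output column.
        RegressionEvaluation regEval = new RegressionEvaluation(1);
        regEval.eval(ds.getLabels(), output);
        System.out.println(regEval.stats());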
    }
}