Advertisement
Not a member of Pastebin yet? Sign up — it unlocks many cool features!
- auto layers = some<CNeuralLayers>();
- //Generate layers 1 by 1
- //...
- //Done generating layers
- auto all_layers = layers->done();
- //Now we generate the Neural network
- auto network = some<CNeuralNetwork>(all_layers);
- network->quick_connect()
- network->initialize_neural_network()
- network->set_labels(labels_train)
- //... L1/L2/Learning Rate
- //Predict the value of network for features_train
- auto labels_predict = network->apply_regression(features_train);
- //Define loss founction similar to the way it is done in the unit test of FirstOrderSAGCostFunctionInterface
- auto loss = some<FirstOrderSAGCostFunctionInterface>();
- loss->set_training_data(features_train, labels_test);
- loss->set_ith_cost_function(ith_cost_function); //ith cost function is any function that defines the loss like in the unit test of the new pr
- loss->set_total_cost_function(total_cost_function); //check new pr for examples of ith_cost_function and total_cost_function
- //Now we generate an Adam updater and SGD minimizer to minimize loss
- auto adam = AdamUpdater();
- auto optimizer = SGDMinimizer(loss); //Minimize the loss defined up
- optimizer.set_gradient_updater(adam);
- optimizer.set_learning_rate(0.001);
- //Now minimize the loss by utilizing stan auto diff
- optimizer.minimize();
Advertisement
Add Comment
Please sign in to add a comment.
Advertisement