// Headers for Shogun's neural nets and optimization framework;
// FirstOrderSAGCostFunctionInterface comes from the new PR, so its
// header is not listed here
#include <shogun/base/some.h>
#include <shogun/neuralnets/NeuralLayers.h>
#include <shogun/neuralnets/NeuralNetwork.h>
#include <shogun/optimization/AdamUpdater.h>
#include <shogun/optimization/SGDMinimizer.h>

using namespace shogun;

// features_train, labels_train, etc. are assumed to be loaded already

auto layers = some<CNeuralLayers>();
// Generate the layers one by one
// ...
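// For example (a hypothetical sketch: the layer sizes and the fluent
// input()/rectified_linear()/linear() builders are assumptions):
//   layers->input(num_features)->rectified_linear(10)->linear(1);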
// Done generating layers
auto all_layers = layers->done();

// Now we build the neural network from the layers
auto network = some<CNeuralNetwork>(all_layers);
network->quick_connect();
network->initialize_neural_network();
network->set_labels(labels_train);
// ... set L1/L2 regularization, learning rate, etc.

// Predict the value of the network for features_train
auto labels_predict = network->apply_regression(features_train);

// Define the loss function similar to the way it is done in the unit test of
// FirstOrderSAGCostFunctionInterface (see the Stan sketch after this listing)
auto loss = some<FirstOrderSAGCostFunctionInterface>();
loss->set_training_data(features_train, labels_train);
loss->set_ith_cost_function(ith_cost_function);     // any function that defines the loss on the i-th sample, as in the unit test of the new PR
loss->set_total_cost_function(total_cost_function); // check the new PR for examples of ith_cost_function and total_cost_function

// Now we create an Adam updater and an SGD minimizer to minimize the loss
auto adam = some<AdamUpdater>();
auto optimizer = some<SGDMinimizer>(loss); // minimizes the loss defined above
optimizer->set_gradient_updater(adam);
optimizer->set_learning_rate(0.001);

// Now minimize the loss, using Stan's automatic differentiation for the gradients
optimizer->minimize();
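
For reference, here is a minimal standalone sketch of the kind of gradient computation Stan's reverse-mode autodiff provides, which is what ith_cost_function ultimately relies on. This is not the FirstOrderSAGCostFunctionInterface API from the PR: the squared-error cost, the variable names w, x and y, and the single-sample setup are assumptions made purely for illustration.

#include <stan/math.hpp>
#include <iostream>

int main()
{
    // Hypothetical single-sample squared-error cost: (w * x - y)^2.
    // w is the parameter we differentiate with respect to; x and y are data.
    stan::math::var w = 0.5;
    double x = 2.0;
    double y = 3.0;

    stan::math::var cost = (w * x - y) * (w * x - y);

    // Reverse-mode sweep: propagates adjoints back to every var in the expression
    cost.grad();

    // d(cost)/dw = 2 * x * (w * x - y) = 2 * 2 * (-2) = -8
    std::cout << "cost = " << cost.val()
              << ", dcost/dw = " << w.adj() << std::endl;
    return 0;
}

Once the cost is expressed through Stan's var type like this, the SGD/Adam updaters only need the adjoints left behind by the reverse sweep, so no gradient has to be derived by hand.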