network.cpp_old

a guest
Nov 9th, 2016
149
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
C++ 5.10 KB | None | 0 0
//------------------------------------------------
//  NETWORK.CPP
//------------------------------------------------

#include "network.hpp"

// Libraries
#include "config.hpp"
#include "errfct.hpp"

#include <stdexcept> // for invalid_argument, thrown on size mismatches below

// CODE

using namespace std;

network::network(vector<unsigned int> neurons_per_layer, rne &random_engine, double learning_rate) :
    _network_structure(neurons_per_layer), _neurons(neurons_per_layer.size() ),
    _random_engine(random_engine), _learnrate(learning_rate), _layerfact(1.2)
{
    for (unsigned int i = 0; i < neurons_per_layer.size(); ++i)
    {
        _neurons[i].resize(neurons_per_layer[i], neuron(essential::fact_linear, essential::dfact_linear) );
    }

#ifdef BIAS_NEURON
    // Append the bias neuron to the input layer, constructed with the same
    // linear activation as the other input neurons so the layer stays uniform.
    _neurons[0].resize(neurons_per_layer[0] + 1, neuron(essential::fact_linear, essential::dfact_linear) );
#endif //BIAS_NEURON

    _initialize_random_rates();
}


// Methods

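// Compile-time configuration. The following macros are presumably set in
// config.hpp (this file only tests them, it never defines them):
//   BIAS_NEURON   - append a constant-output bias neuron to the input layer
//   FEED_FORWARD  - connect each layer only to its immediate predecessor
//   FULL_FORWARD  - connect each layer to every preceding layer
//   LEARN_ONLINE  - train after every single test case
//   LEARN_OFFLINE - train once per epoch on the averaged error
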
void network::_initialize_random_rates()
{
    // The input layer has no incoming connections, so start at the second layer.
    for (vector<vector<neuron>>::iterator layer = _neurons.begin() + 1;
            layer != _neurons.end(); ++layer)
    {
#ifdef FEED_FORWARD
        vector<vector<neuron>>::iterator prev_layer = layer - 1;
        for (auto& successor: *layer)
        {
            for (auto& predecessor: *prev_layer)
            {
                // A plain local suffices here; the `static` on this variable
                // served no purpose.
                double rate = _random_engine.splitrange(0.05, 0.5);
                link_neurons(predecessor, successor, rate);
            }
        }
#endif //FEED_FORWARD

#ifdef FULL_FORWARD
        for (vector<vector<neuron>>::iterator prev_layer = _neurons.begin();
                prev_layer != layer; ++prev_layer)
        {
            for (auto& successor: *layer)
            {
                for (auto& predecessor: *prev_layer)
                {
                    double rate = _random_engine.splitrange(0.05, 0.5);
                    link_neurons(predecessor, successor, rate);
                }
            }
        }
#endif //FULL_FORWARD
    }
}

void network::_reset_netoutputs()
{
    for (auto& layer: _neurons)
    {
        for (auto& neuron: layer)
        {
            neuron.out = 0;
        }
    }
}

void network::_calculate_netoutputs(vector<double> input)
{
    unsigned input_size = input.size();
    if (input_size != _network_structure.front() )
    {
        throw invalid_argument("network: input size does not match the input layer");
    }

    for (unsigned i = 0; i < input_size; ++i)
    {
        _neurons.front()[i].out = input[i];
    }

#ifdef BIAS_NEURON
    // The bias neuron is the last neuron of the input layer and always
    // sends a constant signal.
    _neurons.front().back().out = 1;
#endif //BIAS_NEURON

    // The input layer needs no activation; propagate through the rest.
    for (vector<vector<neuron>>::iterator layer = _neurons.begin() + 1; layer != _neurons.end(); ++layer)
    {
        for (auto& neuron: *layer)
        {
            neuron.activate();
        }
    }
}

void network::_train_network(vector<double> errors)
{
    double lrate = _learnrate;

    // Train the output neurons
    for (unsigned i = 0; i < _network_structure.back(); ++i)
    {
        _neurons.back()[i].calculate_delta(errors[i]);
        _neurons.back()[i].train_connection(lrate);
    }

    // Train all layers except the input and output layers, walking backwards
    // from the last hidden layer towards the input.
    for (vector<vector<neuron>>::reverse_iterator rlayer = _neurons.rbegin() + 1;
            rlayer != _neurons.rend() - 1; ++rlayer)
    {
        for (auto& neuron: *rlayer)
        {
            neuron.calculate_delta();
            neuron.train_connection(lrate);
        }
        // _layerfact is 1.2, so layers closer to the input train with a
        // larger effective learning rate.
        lrate *= _layerfact;
    }
}

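// Run one training epoch over all test cases. Depending on the learning
// macros, the network is trained either after every case (LEARN_ONLINE) or
// once on the component-wise average error (LEARN_OFFLINE). Returns the sum
// of the per-case errors as measured by _errfct.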
double network::train_epoch(testcases tests)
{
    vector<vector<double>> errors;
    for (auto& test: tests)
    {
        if (test[0].size() != _network_structure.front() || test[1].size() != _network_structure.back() )
        {
            throw invalid_argument("network: test case does not match the network structure");
        }

        _reset_netoutputs();
        _calculate_netoutputs(test.front() );

        vector<double> tmp_error(_network_structure.back() );
        for (unsigned j = 0; j < tmp_error.size(); ++j)
        {
            tmp_error[j] = test.back()[j] - _neurons.back()[j].out;
        }
        errors.push_back(tmp_error);

#ifdef LEARN_ONLINE
        _train_network(errors.back() );
#endif //LEARN_ONLINE
    }
#ifdef LEARN_OFFLINE
    vector<double> avg_error(_network_structure.back(), 0);
    for (unsigned i = 0; i < _network_structure.back(); ++i)
    {
        for (auto const& error: errors)
        {
            avg_error[i] += error[i];
        }
        avg_error[i] /= static_cast<double>(errors.size() );
    }
    _train_network(avg_error);
#endif //LEARN_OFFLINE

    // Sum up the per-case errors as measured by the error function.
    double totalerror = 0;
    for (auto const& error: errors)
    {
        totalerror += _errfct(error);
    }

    return totalerror;
}

vector<double> network::solve(vector<double> input)
{
    _reset_netoutputs();
    _calculate_netoutputs(input);

    vector<double> solution;
    for (auto const& output_neuron: _neurons.back() )
    {
        solution.push_back(output_neuron.out);
    }
    return solution;
}
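
//------------------------------------------------
//  Usage sketch (not part of the original file)
//------------------------------------------------
// A minimal, hypothetical demo of the interface implied above. It assumes
// that `rne` is default-constructible, that `testcases` is a container of
// {input, target} vector pairs, and that network.hpp declares train_epoch()
// and solve() as defined here; none of that is confirmed by this file. The
// NETWORK_USAGE_EXAMPLE guard is made up for this sketch.
#ifdef NETWORK_USAGE_EXAMPLE
#include <iostream>

int main()
{
    rne random_engine;                          // assumed default-constructible
    network net({2, 3, 1}, random_engine, 0.1); // 2 inputs, 3 hidden, 1 output

    // XOR truth table: each test case is {input vector, target vector}.
    testcases tests = {
        { {0, 0}, {0} },
        { {0, 1}, {1} },
        { {1, 0}, {1} },
        { {1, 1}, {0} },
    };

    for (int epoch = 0; epoch < 1000; ++epoch)
    {
        double error = net.train_epoch(tests);
        if (epoch % 100 == 0)
        {
            cout << "epoch " << epoch << ": total error " << error << endl;
        }
    }

    for (double out: net.solve({1, 0}) )
    {
        cout << out << endl; // should approach 1 for XOR(1, 0)
    }
    return 0;
}
#endif //NETWORK_USAGE_EXAMPLE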