Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- //------------------------------------------------
- // NETWORK.CPP
- //------------------------------------------------
#include "network.hpp"
// Libraries
#include <stdexcept>
#include "config.hpp"
#include "errfct.hpp"
- // CODE
- using namespace std;
- network::network(vector<unsigned int> neurons_per_layer, rne &random_engine, double learning_rate) :
- _network_structure(neurons_per_layer), _neurons(neurons_per_layer.size() ),
- _random_engine(random_engine), _learnrate(learning_rate), _layerfact(1.2)
- {
- for (unsigned int i = 0; i < neurons_per_layer.size(); ++i)
- {
- _neurons[i].resize(neurons_per_layer[i], neuron(essential::fact_linear, essential::dfact_linear) );
- }
- #ifdef BIAS_NEURON
- _neurons[0].resize(neurons_per_layer[0] + 1);
- #endif //BIAS_NEURON
- _initialize_random_rates();
- }
- // Methods
- void network::_initialize_random_rates()
- {
- for (vector<vector<neuron>>::iterator layer = ++_neurons.begin();
- layer != _neurons.end(); ++layer)
- {
- #ifdef FEED_FORWARD
- vector<vector<neuron>>::iterator prev_layer = layer - 1;
- for (auto& successors: *layer)
- {
- static double rate;
- for (auto& predecessors: *prev_layer)
- {
- rate = _random_engine.splitrange(0.05,0.5);
- link_neurons(predecessors, successors, rate);
- }
- }
- #endif //FEED_FORWARD
- #ifdef FULL_FORWARD
- for (vector<vector<neuron>>::iterator prev_layers = _neurons.begin();
- prev_layers != layer; ++prev_layers)
- {
- for (auto& successors: *layer)
- {
- static double rate;
- for (auto& predecessors: *prev_layers)
- {
- rate = _random_engine.splitrange(0.05,0.5);
- link_neurons(predecessors, successors, rate);
- }
- }
- }
- #endif //FULL_FORWARD
- }
- }
- void network::_reset_netoutputs()
- {
- for (auto& layer: _neurons)
- {
- for (auto& neuron: layer)
- {
- neuron.out = 0;
- }
- }
- }
- void network::_calculate_netoutputs(vector<double> input)
- {
- unsigned input_size = input.size();
- if (input_size != _network_structure.front() )
- {
- //Throw an error --- will be implemented
- }
- for (unsigned i = 0; i < input_size; ++i)
- {
- _neurons.front()[i].out = input[i];
- }
- #ifdef BIAS_NEURON
- _neurons.front().back().out = 1; // The Bias neuron is the last neuron of the input layer, and always
- // sends a constant signal
- #endif
- for (vector<vector<neuron>>::iterator layer = ++_neurons.begin(); layer != _neurons.end(); ++layer)
- {
- for (auto& neuron: *layer)
- {
- neuron.activate();
- }
- }
- }
- void network::_train_network(vector<double> errors)
- {
- double lrate = _learnrate;
- // Train the output neurons
- for (unsigned i = 0; i < _network_structure.back(); ++i)
- {
- _neurons.back()[i].calculate_delta(errors[i]);
- _neurons.back()[i].train_connection(lrate);
- }
- // Train all, but the input/output neurons
- for (vector<vector<neuron>>::reverse_iterator rlayer = ++_neurons.rbegin();
- rlayer != --_neurons.rend(); ++rlayer)
- {
- for (auto& neuron: *rlayer)
- {
- neuron.calculate_delta();
- neuron.train_connection(lrate);
- }
- lrate *= _layerfact;
- }
- }
- double network::train_epoch(testcases tests)
- {
- vector<vector<double>> errors;
- for (auto& test: tests)
- {
- if (test[0].size() != _network_structure.front() || test[1].size() != _network_structure.back() )
- {
- //Throw an error
- }
- _reset_netoutputs();
- _calculate_netoutputs(test.front() );
- vector<double> tmp_error(_network_structure.back() );
- for (unsigned j = 0; j < tmp_error.size(); ++j)
- {
- tmp_error[j] = test.back()[j] - _neurons.back()[j].out;
- }
- errors.push_back(tmp_error);
- #ifdef LEARN_ONLINE
- _train_network(errors.back() );
- #endif //LEARN_ONLINE
- }
- #ifdef LEARN_OFFLINE
- vector<double> avg_error(_network_structure.back(), 0);
- for (unsigned i = 0; i < _network_structure.back(); ++i)
- {
- for (auto const& error: errors)
- {
- avg_error[i] += error[i];
- }
- avg_error[i] /= (double)errors.size();
- }
- _train_network(avg_error);
- #endif //LEARN_OFFLINE
- vector<double> specific_errors;
- for (auto const& error: errors)
- {
- specific_errors.push_back(_errfct(error) );
- }
- double totalerror = 0;
- for (auto& specific_error: specific_errors)
- {
- totalerror += specific_error;
- }
- return totalerror;
- }
- vector<double> network::solve(vector<double> input)
- {
- _reset_netoutputs();
- _calculate_netoutputs(input);
- vector<double> solution;
- for (auto const& output_neuron: _neurons.back() )
- {
- solution.push_back(output_neuron.out);
- }
- return solution;
- }
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement