void NeuralNetwork::FeedForwardNN()
{
    // OBSOLETE
    // Propagate the input layer into the hidden layer: each hidden neuron sums
    // the weighted input values, then applies the activation function.
    for (size_t hiddenIndex = 0; hiddenIndex < m_hiddenLayers->m_neurons.size(); hiddenIndex++)
    {
        double sum = 0;
        for (size_t index = 0; index < m_inputs->m_neurons.size(); index++)
        {
            sum += (m_inputs->m_neurons.at(index).m_value) * (m_inputs->m_neurons.at(index).m_weights.at(hiddenIndex));
        }
        //sum += m_inputs->m_bias.m_value * m_inputs->m_bias.m_weights.at(hiddenIndex);

        m_hiddenLayers->m_neurons.at(hiddenIndex).m_sumOfPreviousLayer = sum;
        m_hiddenLayers->m_neurons.at(hiddenIndex).m_value = GetActivationValue(sum);
    }

    // Propagate the hidden layer into the output layer in the same way.
    for (size_t outputIndex = 0; outputIndex < m_outputs->m_neurons.size(); outputIndex++)
    {
        double sum = 0;
        for (size_t hiddenIndex = 0; hiddenIndex < m_hiddenLayers->m_neurons.size(); hiddenIndex++)
        {
            sum += (m_hiddenLayers->m_neurons.at(hiddenIndex).m_value) * (m_hiddenLayers->m_neurons.at(hiddenIndex).m_weights.at(outputIndex));
        }

        //sum += m_hiddenLayers->m_bias.m_value * m_hiddenLayers->m_bias.m_weights.at(outputIndex);

        m_outputs->m_neurons.at(outputIndex).m_sumOfPreviousLayer = sum;
        m_outputs->m_neurons.at(outputIndex).m_value = GetActivationValue(sum);
    }
}
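
// GetActivationValue() is not included in this paste. Below is a minimal sketch of
// what it might look like, assuming a standard logistic (sigmoid) activation and
// that the function is a member of NeuralNetwork; the real implementation may use
// tanh, ReLU, or something else entirely. Requires <cmath>.
double NeuralNetwork::GetActivationValue(double sum)
{
    // Assumed sigmoid: squashes the weighted sum into the (0, 1) range.
    return 1.0 / (1.0 + std::exp(-sum));
}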