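// --- Assumed supporting declarations (a minimal sketch, not part of the paste) ---
// The member functions below use a NET class with a layer-by-layer `matrix` of
// neurons, per-neuron `bond` weights into the next layer, a `learning_rate`,
// leaky-ReLU helpers and a cleanup() routine, none of which are shown here.
// Everything in this block is reconstructed from how those names are used and
// should be treated as a guess, not the original author's definitions.
#include <cstdlib>
#include <cmath>
#include <iostream>
#include <vector>
using namespace std;

// Leaky ReLU activation and its derivative. The negative-side slope of 0.01
// is an assumption; the original constant is not shown.
float leaky_relu(float x)       { return x > 0 ? x : 0.01f * x; }
float leaky_relu_deriv(float x) { return x > 0 ? 1.0f : 0.01f; }

struct Bond                 // a forward connection to one neuron in the next layer
{
    float weight;
};

struct Neuron
{
    float input;            // weighted sum accumulated during forward()
    float output;           // activated value
    float bias;
    float gradient;         // error signal computed during backward()
    vector<Bond> bond;      // one bond per neuron in the next layer (empty for the last layer)
};

class NET
{
public:
    NET(const vector<unsigned>& topology);    // builds `matrix`, one vector of neurons per layer
    vector<float> forward(vector<float> input);
    float backward(vector<float> target);
    void cleanup();                            // resets the accumulated inputs between passes

private:
    vector< vector<Neuron> > matrix;           // matrix[layer][neuron]
    float learning_rate = 0.1f;                // assumed value; not given in the paste
};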
vector<float> NET::forward(vector<float> input)
{
    unsigned in_size = input.size();
    unsigned matrix_size = matrix.size();

    // Load the input values into the first layer.
    for (unsigned i = 0; i < in_size; i++)
        matrix[0][i].input = input[i];

    // Propagate layer by layer: add the (activated) bias, activate the
    // accumulated input, then push the result through every bond into the
    // next layer's inputs.
    for (unsigned i = 0; i < matrix_size; i++)
    {
        unsigned next_layer = i + 1;
        unsigned layer_size = matrix[i].size();
        for (unsigned j = 0; j < layer_size; j++)
        {
            matrix[i][j].input += leaky_relu(matrix[i][j].bias);
            matrix[i][j].output = leaky_relu(matrix[i][j].input);
            unsigned bond_size = matrix[i][j].bond.size();   // empty for the last layer
            for (unsigned k = 0; k < bond_size; k++)
                matrix[next_layer][k].input += matrix[i][j].output * matrix[i][j].bond[k].weight;
        }
    }

    // Collect the outputs of the last layer.
    matrix_size--;
    vector<float> output(matrix[matrix_size].size());
    unsigned out_size = output.size();
    for (unsigned i = 0; i < out_size; i++)
        output[i] = matrix[matrix_size][i].output;
    return output;
}

float NET::backward(vector<float> target)
{
    float error = 0;
    unsigned matrix_size = matrix.size() - 1;   // index of the output layer
    unsigned target_size = target.size();

    // Gradient of the output layer from the difference to the target.
    for (unsigned i = 0; i < target_size; i++)
    {
        matrix[matrix_size][i].gradient = (target[i] - matrix[matrix_size][i].output) * leaky_relu_deriv(matrix[matrix_size][i].output);
        error += fabs(matrix[matrix_size][i].gradient);
    }
    error /= target_size + !target_size;   // mean; "+ !target_size" guards against division by zero

    // Back-propagate the gradients through the hidden layers.
    for (int i = (int)matrix_size - 1; i >= 0; i--)
    {
        unsigned layer_size = matrix[i].size();
        unsigned next_layer = i + 1;
        for (unsigned j = 0; j < layer_size; j++)
        {
            float delta = 0;
            unsigned bond_size = matrix[i][j].bond.size();
            for (unsigned k = 0; k < bond_size; k++)
                delta += matrix[i][j].bond[k].weight * matrix[next_layer][k].gradient;
            matrix[i][j].gradient = delta * leaky_relu_deriv(matrix[i][j].output);
        }
    }

    // Update biases and weights with a simple gradient step.
    for (unsigned i = 0; i < matrix_size; i++)
    {
        unsigned layer_size = matrix[i].size();
        unsigned next_layer = i + 1;
        for (unsigned j = 0; j < layer_size; j++)
        {
            matrix[i][j].bias += learning_rate * matrix[i][j].gradient;
            unsigned bond_size = matrix[i][j].bond.size();
            for (unsigned k = 0; k < bond_size; k++)
                matrix[i][j].bond[k].weight += learning_rate * matrix[next_layer][k].gradient * matrix[i][j].output;
        }
    }
    return error;
}

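// --- Assumed constructor and cleanup() (not shown in the paste) ---
// A sketch of the two remaining members the code relies on: the constructor
// wires every neuron to each neuron of the next layer with small random
// weights, and cleanup() clears the accumulated inputs so the next forward()
// pass starts from zero (forward() uses += on them). The initialization range
// is a guess.
NET::NET(const vector<unsigned>& topology)
{
    unsigned layers = topology.size();
    matrix.resize(layers);
    for (unsigned i = 0; i < layers; i++)
    {
        matrix[i].resize(topology[i]);
        for (unsigned j = 0; j < topology[i]; j++)
        {
            Neuron& n = matrix[i][j];
            n.input = n.output = n.gradient = 0;
            n.bias = (float)rand() / RAND_MAX - 0.5f;    // small random bias
            if (i + 1 < layers)                           // the last layer has no outgoing bonds
            {
                n.bond.resize(topology[i + 1]);
                for (unsigned k = 0; k < topology[i + 1]; k++)
                    n.bond[k].weight = (float)rand() / RAND_MAX - 0.5f;
            }
        }
    }
}

void NET::cleanup()
{
    for (unsigned i = 0; i < matrix.size(); i++)
        for (unsigned j = 0; j < matrix[i].size(); j++)
            matrix[i][j].input = 0;
}
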
// Training on the XOR-like rule: the target is 1 exactly when one input is
// above 0.5 and the other is below.
int main()
{
    vector<unsigned> topology(4);
    topology[0] = 2;   // input layer
    topology[1] = 4;   // hidden layer
    topology[2] = 4;   // hidden layer
    topology[3] = 1;   // output layer
    NET network(topology);
    unsigned count = 0;
    while (true)
    {
        vector<float> input(2);
        input[0] = (float)rand() / RAND_MAX;
        input[1] = (float)rand() / RAND_MAX;
        vector<float> target(1);
        target[0] = (input[0] > 0.5 && input[1] < 0.5) || (input[0] < 0.5 && input[1] > 0.5);
        vector<float> output = network.forward(input);
        network.backward(target);
        float error = 0;
        for (unsigned i = 0; i < output.size(); i++)
            error += fabs(target[i] - output[i]);
        if (count % 1000 == 0)
        {
            cout << error << endl;
        }
        network.cleanup();
        count++;
    }
}
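// Usage note: built together with the sketched declarations above, the program
// prints the absolute output error of one sample every 1000 iterations; the
// per-sample values are noisy, but they should trend toward zero as the
// network learns the rule. The loop runs forever, so stop it manually.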