chessbucket

arduino nn deployment

Jun 16th, 2019
#include <BasicLinearAlgebra.h>
#include <math.h>

using namespace BLA;

// Logistic sigmoid. Unused by the forward pass below (which is all tanh),
// but kept for the "sigmoid" branch of the layer helper.
double sigmoid(double a)
{
  return (1.0 / (1.0 + exp(-a)));
}

/*
Commented out in the original, and left commented here: in, weight, bias and
out are never declared or passed in, so this cannot compile as written. The
string comparisons also needed == rather than = (fixed below). A compiling
take on the same idea follows after this block.
void connectLayers(const Matrix &B, String act)
{
  Multiply(in, weight, out);
  Add(out, bias, out);
  for (int i = 0; i < out.GetColCount(); i++)
  {
    if (act == "tanh")
    {
      out(0, i) = tanh(out(0, i));
    }
    else if (act == "sigmoid")
    {
      out(0, i) = sigmoid(out(0, i));
    }
  }
}
*/
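
/* A working version of the idea above, offered as a sketch: the name
   denseLayer and the bool flag (in place of the String argument) are
   assumptions, not part of the original paste. BLA matrices are statically
   sized, so the layer dimensions have to be template parameters rather than
   runtime arguments. */
template <int In, int Out>
void denseLayer(const BLA::Matrix<1, In> &in, const BLA::Matrix<In, Out> &weight,
                const BLA::Matrix<1, Out> &bias, BLA::Matrix<1, Out> &out,
                bool useTanh)
{
  Multiply(in, weight, out); // out = in * weight
  Add(out, bias, out);       // out = out + bias
  for (int i = 0; i < Out; i++)
  {
    out(0, i) = useTanh ? tanh(out(0, i)) : sigmoid(out(0, i));
  }
}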

void setup() {
  Serial.begin(9600);

  // Grab 40 back-to-back samples from the two sensors, timestamping each
  // pass in microseconds relative to the start of the burst.
  float TempVal[40], SalVal[40];
  unsigned long timeStamp[40] = {0};
  unsigned long offset = micros();
  for (int i = 0; i < 40; i++)
  {
    timeStamp[i] = (micros() - offset);
    TempVal[i] = analogRead(A2);
    SalVal[i] = analogRead(A6);
  }
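
  /* Note (an inference from the constants below, not stated in the original):
     the time columns of resizeMin/resizeMax advance in roughly 232 us steps,
     which suggests the training data was captured on a fixed cadence. The
     loop above free-runs instead; if matching that cadence matters, each
     iteration could be paced with something like
       while (micros() - offset < (unsigned long)i * 232) {}
     before the reads. */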

  //Assuming deep, fully connected layers
  const int iS = 120; // input layer size: 40 samples x 3 features (temp, sal, time)
  const int hS1 = 1;  // neurons in hidden layer 1
  const int hS2 = 1;  // neurons in hidden layer 2
  const int hS3 = 1;  // neurons in hidden layer 3
  const int oS = 1;   // neurons in output layer

  BLA::Matrix <1, iS> input;
  // Per-feature training-set minima/maxima, interleaved as (temp, sal, time)
  // triples to match the input layout assembled below.
  BLA::Matrix <1, iS> resizeMin = {1.520E+02, 6.140E+02, 4.000E+00, 1.520E+02, 6.140E+02, 2.320E+02,
                                   1.530E+02, 6.120E+02, 4.640E+02, 1.520E+02, 6.140E+02, 6.960E+02,
                                   1.520E+02, 6.140E+02, 9.280E+02, 1.520E+02, 6.130E+02, 1.160E+03,
                                   1.520E+02, 6.130E+02, 1.392E+03, 1.520E+02, 6.130E+02, 1.624E+03,
                                   1.520E+02, 6.130E+02, 1.856E+03, 1.520E+02, 6.140E+02, 2.088E+03,
                                   1.530E+02, 6.120E+02, 2.320E+03, 1.520E+02, 6.100E+02, 2.552E+03,
                                   1.520E+02, 6.140E+02, 2.784E+03, 1.510E+02, 6.130E+02, 3.016E+03,
                                   1.520E+02, 6.130E+02, 3.248E+03, 1.510E+02, 6.140E+02, 3.480E+03,
                                   1.520E+02, 6.140E+02, 3.712E+03, 1.520E+02, 6.130E+02, 3.944E+03,
                                   1.520E+02, 6.110E+02, 4.176E+03, 1.510E+02, 6.140E+02, 4.408E+03,
                                   1.520E+02, 6.120E+02, 4.640E+03, 1.520E+02, 6.140E+02, 4.872E+03,
                                   1.510E+02, 6.130E+02, 5.104E+03, 1.520E+02, 6.140E+02, 5.340E+03,
                                   1.510E+02, 6.130E+02, 5.572E+03, 1.520E+02, 6.140E+02, 5.804E+03,
                                   1.530E+02, 6.140E+02, 6.036E+03, 1.520E+02, 6.130E+02, 6.268E+03,
                                   1.520E+02, 6.130E+02, 6.500E+03, 1.520E+02, 6.120E+02, 6.732E+03,
                                   1.520E+02, 6.140E+02, 6.964E+03, 1.510E+02, 6.140E+02, 7.196E+03,
                                   1.520E+02, 6.140E+02, 7.428E+03, 1.520E+02, 6.120E+02, 7.660E+03,
                                   1.510E+02, 6.140E+02, 7.892E+03, 1.530E+02, 6.130E+02, 8.124E+03,
                                   1.520E+02, 6.140E+02, 8.356E+03, 1.520E+02, 6.120E+02, 8.588E+03,
                                   1.520E+02, 6.110E+02, 8.820E+03, 1.520E+02, 6.130E+02, 9.052E+03};
  BLA::Matrix <1, iS> resizeMax = {1.550E+02, 6.330E+02, 8.000E+00, 1.550E+02, 6.340E+02, 3.320E+02,
                                   1.550E+02, 6.320E+02, 5.640E+02, 1.550E+02, 6.330E+02, 7.960E+02,
                                   1.540E+02, 6.320E+02, 1.028E+03, 1.550E+02, 6.320E+02, 1.260E+03,
                                   1.550E+02, 6.320E+02, 1.492E+03, 1.550E+02, 6.340E+02, 1.724E+03,
                                   1.550E+02, 6.310E+02, 1.956E+03, 1.550E+02, 6.320E+02, 2.188E+03,
                                   1.550E+02, 6.340E+02, 2.420E+03, 1.550E+02, 6.310E+02, 2.652E+03,
                                   1.550E+02, 6.330E+02, 2.884E+03, 1.540E+02, 6.320E+02, 3.116E+03,
                                   1.550E+02, 6.330E+02, 3.348E+03, 1.540E+02, 6.310E+02, 3.580E+03,
                                   1.540E+02, 6.320E+02, 3.812E+03, 1.550E+02, 6.310E+02, 4.044E+03,
                                   1.550E+02, 6.320E+02, 4.276E+03, 1.550E+02, 6.320E+02, 4.508E+03,
                                   1.550E+02, 6.310E+02, 4.740E+03, 1.550E+02, 6.330E+02, 4.972E+03,
                                   1.540E+02, 6.320E+02, 5.204E+03, 1.550E+02, 6.330E+02, 5.436E+03,
                                   1.540E+02, 6.310E+02, 5.668E+03, 1.550E+02, 6.340E+02, 5.900E+03,
                                   1.550E+02, 6.310E+02, 6.132E+03, 1.540E+02, 6.320E+02, 6.372E+03,
                                   1.540E+02, 6.310E+02, 6.604E+03, 1.550E+02, 6.320E+02, 6.836E+03,
                                   1.550E+02, 6.330E+02, 7.068E+03, 1.540E+02, 6.320E+02, 7.300E+03,
                                   1.550E+02, 6.330E+02, 7.532E+03, 1.540E+02, 6.320E+02, 7.764E+03,
                                   1.540E+02, 6.320E+02, 7.996E+03, 1.550E+02, 6.320E+02, 8.228E+03,
                                   1.550E+02, 6.340E+02, 8.460E+03, 1.550E+02, 6.320E+02, 8.692E+03,
                                   1.540E+02, 6.310E+02, 8.924E+03, 1.550E+02, 6.320E+02, 9.156E+03};
  // Min-max scale each feature into [0, 1] using the training-set ranges:
  // xdata = (xdata - xmin) / (xmax - xmin), applied element-wise.
  // The sample arrays hold 40 entries while the input vector holds 120, so
  // the arrays are indexed by i / 3 and the row vectors as (0, i). The
  // original's TempVal[i] / resizeMin(i) indexing read past the arrays and
  // the wrong matrix elements.
  for (int i = 0; i < input.GetColCount(); i += 3)
  {
    input(0, i)     = (TempVal[i / 3] - resizeMin(0, i)) / (resizeMax(0, i) - resizeMin(0, i));
    input(0, i + 1) = (SalVal[i / 3] - resizeMin(0, i + 1)) / (resizeMax(0, i + 1) - resizeMin(0, i + 1));
    input(0, i + 2) = (timeStamp[i / 3] - resizeMin(0, i + 2)) / (resizeMax(0, i + 2) - resizeMin(0, i + 2));
  }
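
  /* Worked example of the min-max scaling above: the first temperature
     feature has min 152 and max 155, so a hypothetical raw reading of 153
     maps to (153 - 152) / (155 - 152) = 1/3, about 0.33. Every feature lands
     in [0, 1] as long as it stays inside the training range. */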

  BLA::Matrix <1, hS1> hidden1;
  BLA::Matrix <1, hS2> hidden2;
  BLA::Matrix <1, hS3> hidden3;
  BLA::Matrix <1, oS> output;
  // Placeholders only; Multiply() overwrites every element below. The
  // original also called input.Fill(1) here, which would have wiped out the
  // scaled readings computed above, so that call is dropped.
  hidden1.Fill(1);
  hidden2.Fill(1);
  hidden3.Fill(1);
  output.Fill(1);

  //weights from input -> hidden1
  BLA::Matrix<iS, hS1> w1 = { -2.251689787954092026E-03, 2.078449279069900513E-01, -6.174370646476745605E-02, -1.294941604137420654E-01, -1.960633248090744019E-01, 2.095378339290618896E-01, -1.128478348255157471E-01, 2.074594050645828247E-01, 1.797222197055816650E-01, -1.595804244279861450E-01, 1.617034673690795898E-01, 1.519532408565282822E-02, 1.532623022794723511E-01, -1.523475646972656250E-01, -1.060240492224693298E-01, 1.646637767553329468E-01, 1.431714743375778198E-01, -7.227062433958053589E-02, -1.439640522003173828E-01, 4.284194484353065491E-02, 2.209243178367614746E-01, 7.616808172315359116E-03, -2.310660481452941895E-01, -2.087087035179138184E-01, -1.661327630281448364E-01, 1.959848850965499878E-01, -1.410448998212814331E-01, -8.355728536844253540E-02, 5.052351579070091248E-02, 2.200548350811004639E-01, 5.016517825424671173E-03, -1.194399520754814148E-01, -1.496096402406692505E-01, -9.792125225067138672E-02, 1.715912818908691406E-01, -1.294056177139282227E-01, -1.837181299924850464E-01, 1.814135462045669556E-01, -1.262866109609603882E-01, -1.174561679363250732E-01, -1.747280657291412354E-01, -6.911590695381164551E-02, -1.444210261106491089E-01, 1.909952163696289062E-01, -1.836621314287185669E-01, -2.292088866233825684E-01, 6.661105155944824219E-02, -9.071853756904602051E-02, 1.107472404837608337E-01, -1.990463733673095703E-01, 1.913081407546997070E-01, -2.117955386638641357E-01, -1.817548722028732300E-01, 1.037685573101043701E-01, -1.178410351276397705E-01, -2.112298905849456787E-01, -1.337243765592575073E-01, -1.069791018962860107E-01, -7.672908157110214233E-02, 1.246274113655090332E-01, 1.839692592620849609E-01, 1.742247343063354492E-01, -1.339842528104782104E-01, -1.048430949449539185E-01, 5.569282919168472290E-02, 1.340724080801010132E-01, -1.821028739213943481E-01, 8.409734070301055908E-02, 8.550775796175003052E-02, -1.375128775835037231E-01, -1.748649030923843384E-02, -8.476732671260833740E-02, 8.759501576423645020E-02, 5.966226384043693542E-02, -1.625532656908035278E-01, -2.238687723875045776E-01, -1.820734143257141113E-01, 7.845860719680786133E-02, -2.067508101463317871E-01, -2.267828583717346191E-01, 1.421210914850234985E-02, 1.710806638002395630E-01, -1.220725029706954956E-01, -1.230347454547882080E-01, -2.408410236239433289E-02, -1.400239169597625732E-01, 1.552093476057052612E-01, 1.353525519371032715E-01, -8.251619525253772736E-03, 6.090170703828334808E-03, -1.159833297133445740E-01, -5.444481223821640015E-02, -2.193169742822647095E-01, -1.008751392364501953E-01, -2.081227153539657593E-01, -3.562032803893089294E-02, 1.269473433494567871E-01, -6.943794339895248413E-02, 5.718484171666204929E-04, 1.164589449763298035E-01, 1.406312286853790283E-01, 2.029875218868255615E-01, -1.916677653789520264E-01, -1.550870686769485474E-01, 4.468404501676559448E-02, -1.010407432913780212E-01, 1.392701119184494019E-01, 2.657910995185375214E-02, -1.483597457408905029E-01, 1.252990067005157471E-01, -1.630185246467590332E-01, -5.802119150757789612E-02, -2.273037135601043701E-01, -4.355586320161819458E-02, 1.874085068702697754E-01, -1.690577715635299683E-01, -9.867803892120718956E-04, -1.497109234333038330E-01, -1.793888360261917114E-01, 1.655142754316329956E-01};
  //weights from hidden1 -> hidden2
  BLA::Matrix<hS1, hS2> w2 = {-1.386107921600341797E+00};
  //weights from hidden2 -> hidden3
  BLA::Matrix<hS2, hS3> w3 = {-6.963229179382324219E-01};
  //weights from hidden3 -> output
  BLA::Matrix<hS3, oS> w4 = {1.730884790420532227E+00};

  //biases from input -> hidden1
  BLA::Matrix<1, hS1> b1 = {-1.929938048124313354E-02};
  //biases from hidden1 -> hidden2
  BLA::Matrix<1, hS2> b2 = {5.753378942608833313E-02};
  //biases from hidden2 -> hidden3
  BLA::Matrix<1, hS3> b3 = {-3.557488322257995605E-01};
  //biases from hidden3 -> output
  BLA::Matrix<1, oS> b4 = {-3.349284529685974121E-01};

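  /* Because every hidden layer here has a single neuron, the network after
     the first layer collapses to nested scalar tanh calls. Writing h1 for
     the first hidden activation, the forward pass below computes (weights
     and biases rounded):
       output = tanh(1.7309 * tanh(-0.6963 * tanh(-1.3861 * h1 + 0.0575)
                - 0.3557) - 0.3349) */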
  // Forward pass: each block is the denseLayer pattern written out by hand,
  // with tanh at every layer.
  Multiply(input, w1, hidden1);   // input -> hidden1
  Add(hidden1, b1, hidden1);
  for (int i = 0; i < hidden1.GetColCount(); i++)
  {
    hidden1(0, i) = tanh(hidden1(0, i));
  }

  Multiply(hidden1, w2, hidden2); // hidden1 -> hidden2
  Add(hidden2, b2, hidden2);
  for (int i = 0; i < hidden2.GetColCount(); i++)
  {
    hidden2(0, i) = tanh(hidden2(0, i));
  }

  Multiply(hidden2, w3, hidden3); // hidden2 -> hidden3
  Add(hidden3, b3, hidden3);
  for (int i = 0; i < hidden3.GetColCount(); i++)
  {
    hidden3(0, i) = tanh(hidden3(0, i));
  }

  Multiply(hidden3, w4, output);  // hidden3 -> output
  Add(output, b4, output);
  for (int i = 0; i < output.GetColCount(); i++)
  {
    output(0, i) = tanh(output(0, i));
  }

  Serial.print("Prediction: ");
  Serial.println(output(0, 0));
}

void loop()
{
  // Inference runs once in setup(); nothing to do here.
}
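
/* The output layer also applies tanh, so the printed prediction lies in
   (-1, 1). Mapping it back to physical units would need the inverse of
   whatever scaling was applied to the training targets, which is not
   included in this paste. */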