Advertisement
Guest User

Untitled

a guest
May 28th, 2015
363
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
text 2.83 KB | None | 0 0
  1.  
  2.  
  3. #include <LiquidCrystal.h>
  4. #include <LCDKeypad.h>
  5.  
  6.  
//// GLOBAL VARIABLES
const int NX=1; //NUMBER OF INPUTS
const int NH=4; //NUMBER OF NEURONS IN THE HIDDEN (CENTRAL) LAYER
const int NY=1; //NUMBER OF OUTPUTS
const int M=17; //NUMBER OF INPUT SAMPLES (INPUT DATA SET) — matches the Xi[] table in loop()
double H[NH]; //HIDDEN-NEURON OUTPUT VECTOR
double Y[NY]; //NETWORK OUTPUT VECTOR
long double A=0; //internal activation accumulator: the weighted sum fed to the sigmoid e^A
double X[NX]; //INPUT VARIABLE VECTOR

//random weight matrices (filled by Random_Matrix_0 / Random_Matrix_1 in setup):
int RM0[NX][NH];
int RM1[NH][NY];
  20.  
  21.  
  22.  
  23. void setup (){
  24. Random_Matrix_0(NX,NH);
  25. Random_Matrix_1(NX,NH);
  26. }
  27.  
  28. void loop(){
  29.  
  30.  
  31. double Xi[]={0.0000,0.0625,0.1250,0.1875,0.2500,0.3125,0.3750,0.4375,0.5000,0.5625,0.6250,0.6875,0.7500,0.8125,0.8750,0.9375,1.0000};
  32.  
  33. for(int l=0; l<16; l++){ //scorre il vettore d'ingresso
  34. Xi[l]=Xi[l+1];
  35. neural(1,4,1,Xi[l]);
  36. }
  37. }
  38.  
  39.  
  40.  
  41. void neural(int NX, int NH, int NY,int Zeta )
  42. {
  43.  
  44. //DEFINE VARIABLES
  45.  
  46.  
  47.  
  48.  
  49. double wh[NH][NX+1]; //PESI
  50. double wy[NY][NH+1]; //VETTORE DELLE USCITE DEI PESI DELLE USCITE DELL'IESIMO STRATO
  51. int i,j,k=0;//INDICI
  52.  
  53.  
  54.  
  55. //CALCULATE THE LAYER H
  56. for (k=1; k<=NH; k++){
  57. for (i=1; i<=NX;i++){
  58. A=A+(wh[k][i]*X[i]); //CALCULATE THE INTERNAL ACTIVATION FUNCTION A, that is the sum wighted of all the input signals
  59. A=A+wh[k][NX+1];
  60. H[k]=1.0/(1.0+exp(-A)); //function of weight of the singolar layer
  61. }}
  62.  
  63.  
  64. //CALCULATE THE LAYER Y //same thing but for the next layer(Y)
  65. for (j=1; j<=NY; j++){
  66. A=0; //initzialize to zero
  67. for(k=1; k<=NH; k++){
  68. A += (wy[j][k]*H[k]);
  69. A += wy[j][NH+1];
  70. Y[j]=1.0/(1.0+exp(-A));
  71. }}
  72.  
  73. //OPTIONAL STRUCTURE FOR LEARNING
  74.  
  75. const double DELTA=0.3; //speed learning
  76. long double Err=0; //error variable (set point-real output)
  77. double D [NY]; //set point output desired
  78. double DeltaY [NY]; //Backpropagation Y
  79. double DeltaH [NH]; // Backpropagation H
  80.  
  81. //BACK PROPAGATION
  82.  
  83. //Calcolo errore strato Y
  84. for (j=1; j<=NY; j++) {
  85. Err=D[j]-Y[j];
  86. DeltaY[j]=Err*Y[j]*(1-Y[j]);
  87. }
  88.  
  89. //Calcolo errore strato H
  90. for (k=1; k<=NH; k++){
  91. Err=0;
  92. for (j=1; j<=NY; j++) {
  93. Err=Err+(DeltaY[j]*wy[j][k]);
  94. DeltaH[k]=Err*H[k]*(1-H[k]);
  95. }}
  96.  
  97. //Modifica pesi strato Y
  98. for (j=1; j<=NY; j++){
  99. for (k=1; k<=NH; k++){
  100. wy[j][k]=wy[j][k]+(DELTA*DeltaY[j]*H[k]);
  101. wy[j][NH+1]=wy[j][NH+1]+(DELTA*DeltaY[j]);
  102. }}
  103.  
  104. //Modifica pesi strato H
  105. for (k=1; k<=NH; k++){
  106. for (i=1; i<=NX; i++){
  107. wh[k][i]=wh[k][i]+(DELTA*DeltaH[k]*X[i]);
  108. wh[k][NX+1]=wh[k][NX+1]+(DELTA*DeltaH[k]);
  109. }}
  110. //implicitamente restituisce NY
  111. }
  112.  
  113. void Random_Matrix_0(int A,int B){
  114. for (int a=1; a<=A;a++){
  115. for (int b=1; b<=B; b++){
  116. int rand =(double) random(0, 1000);
  117. RM0[a][b]= (rand/500)-1;
  118. }}
  119. }
  120. void Random_Matrix_1(int A,int B){
  121. for (int a=1; a<=A;a++){
  122. for (int b=1; b<=B; b++){
  123. int rand =(double) random(0, 1000);
  124. RM1[a][b]= (rand/500)-1;
  125. }}
  126. }
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement