Advertisement
Guest User

C++ Code for a specific Neural Network

a guest
May 19th, 2013
127
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
C++ 8.73 KB | None | 0 0
#include <iostream>
#include <stdio.h>
#include <math.h>
#include <conio.h>
using namespace std;
  5. static int j=0;
  6. class NodeOut
  7. {
  8.     private:
  9.         const double lambda=1; //For Activation Function.
  10.         const double c=0.5; // Learning Constant
  11.         double w[10], del_w[10], out, error, sum;
  12.         int nodes_lim;
  13.     public:
  14.         NodeOut(): out(0), error(0), sum(0), nodes_lim(0)
  15.         {
  16.             for(int i=0;i<10;i++)
  17.             {
  18.                 w[i]=del_w[i]=0;
  19.             }
  20.         }
  21.         //To set weights and find number of nodes connected to this in hidden layer.
  22.         void set_w(double weight[], int num_nodes)
  23.         {
  24.             nodes_lim = num_nodes;
  25.             for(int i=0; i<nodes_lim; i++)
  26.             {
  27.                 w[i]=weight[i];
  28.             }
  29.         }
  30.         //To calculate sum of inputs (hidden layer's outputs)
  31.         double activation(double x[]) //The output of the node is returned. :)
  32.         {
  33.             for(int i=0;i<nodes_lim;i++)
  34.             {
  35.                 sum+=w[i]*x[i];
  36.             }
  37.             //out = sum;
  38.             out=1/(1+exp(-1*lambda*(sum+1)));
  39.             return out;
  40.         }
  41.         void error_calc(double target, double out_hid[], double back_err[])//back_err passed by reference.
  42.         {
  43.             error=out*(1-out)*(target-out);//Calculate Error at node.
  44.             for(int i=0; i<nodes_lim; i++)
  45.             {
  46.                 del_w[i] = c*error*out_hid[i];//Calculate increment to input weights.
  47.                 back_err[i]=w[i]*error;//Backpropagate the error to previous layer.
  48.             }
  49.         }
  50.         void error_update()
  51.         {
  52.             for(int i=0;i<nodes_lim;i++)
  53.                 w[i]+=del_w[i];
  54.         }
  55. };
  56.  
  57. /*
  58. Each hidden layer node is represented by an object of the class NodeH.
  59. 1 hidden layer node has all input weights stored locally.
  60. To keep track of various nodes, each layer of nodes are counted from 0 onwards, left to right.
  61. Thus, input layer nodes would start at 0 at the extreme left and go on to the maximum value at extreme right.  Similarly, hidden layer nodes will start from 0 at extreme left and go on...
  62. */
  63. class NodeH
  64. {
  65.     private:
  66.         int temp=0;
  67.         const double lambda=1; //For Activation Function.
  68.         const double c=0.5; // Learning Constant
  69.         double w[20], del_w[20], out, error, sum;
  70.         int start, stop; //Used to number start of connections with previous layer and end of connections with previous layer.
  71.     public:
  72.         NodeH(): out(0), error(0), sum(0), start(0), stop(0)
  73.         {
  74.             for(int i=0;i<20;i++)
  75.             {
  76.                 w[i]=0; del_w[i]=0;
  77.             }
  78.         }
  79.         void set_w(double weight[],int start_node, int stop_node)//Set Node input weights as per requirement
  80.         {
  81.             start=start_node;stop=stop_node;
  82.             for(int i=start_node;i<=stop_node;i++)
  83.             {
  84.                 w[i]=weight[i];
  85.             }
  86.             printf("start=%d and stop=%d",start, stop);
  87.         }
  88.         //Calculate Sum at node from locally available input
  89.         void activation(double x[])//x is the complete output of input layer nodes. CAN!
  90.         {
  91.             for(int i=start; i<=stop; i++)
  92.             {
  93.                 sum+=w[i]*x[i];
  94.             }
  95.             out = 1/(1+exp(-1*lambda*(sum+1))); //Output using Sigmoid Function
  96.         }
  97.         double get_output()
  98.         {
  99.             return out;
  100.         }
  101.         //The output node's job is to multiply its error with its input weight and pass the product on to the corresponding hidden layer
  102.         //node. The hidden layer node's job is to accept every such back-propagated error from the output layer nodes (In our case, there
  103.         //will only be 1 such output layer node) and then, by summing, form the total back-propagated error.
  104.         void backProp(double back_prop_err, double x[])// x is the complete output of input layer neurons.
  105.         {
  106.             //j++;
  107.             //printf("\nA"); printf("\nj=%d",j);
  108.             error = out*(1-out)*back_prop_err;
  109.             printf("\nB:%20.18f",error);
  110.             //if(j==8)
  111.             //{
  112.             //      printf("\nstart= %d, stop= %d",start, stop);
  113.             //      temp=1;
  114.             //}
  115.             for(int i=start; i<= stop; i++)
  116.             {
  117.             //  if(i==24)
  118.             //      break;
  119.             //  if(temp==1)
  120.             //  {
  121.             //      printf("\nstart= %d and stop= %d", start, stop); temp=0;
  122.             //  }
  123.                 //j++;
  124.                 //printf("\nC");
  125.                 del_w[i] = c*error*x[i];
  126.                 printf("\ndel_w[%d]=%20.18f",i,del_w[i]);
  127.             }
  128.         }
  129.         void error_update()
  130.         {
  131.             for(int i=start; i<=stop; i++)
  132.                 w[i]+=del_w[i];
  133.         }
  134. };
  135.  
  136. /*
  137. NodeIn is the class for objects of Input Node. Each node only has 1 input to it.
  138. */
  139. class NodeIn
  140. {
  141.     private:
  142.         double in, out; //Main function will collect each output from individual nodes and form an array.
  143.         const double lambda=1;
  144.     public:
  145.         NodeIn(): in(0), out(0) {}
  146.         void get_input(double input)
  147.         {
  148.             in = input;
  149.         }
  150.         double activation()
  151.         {
  152.             //out = in;
  153.             out=1/(1+exp(-1*lambda*(in+1)));
  154.             //return out;
  155.         }
  156.         double get_output()
  157.         {
  158.             return out;
  159.         }
  160. };
  161.  
  162. int main()
  163. {
  164.     int i;
  165.     NodeIn N_In[24]; //24 Input Nodes.
  166.     NodeH N_Hid[10]; //10 Hidden layer nodes.
  167.     NodeOut N_Out; //One output node.
  168.     double w1[] = {1e-1, 5e-1, -1e-1, 69e-1, 4e-1, 1e-1}; //Initial random weights for micro1 hidden layer.
  169.     double w2[] = {5e-9, 23e-9, 55e-9, 36e-9, 45e-9, 74e-9, 11e-9, 12e-9, 11e-9, 63e-9, -16e-9, 4e-9, 5e-9}; //Initial random weights for micro2 hidden layer.
  170.     double w3[] = {74e-9, 12e-9, 69e-9, -13e-9, -12e-9}; //Initial random weights for hour information hidden nodes.
  171.     double w4[] = {11e-9, 22e-9, 45e-9, 82e-9, 3e-9, 1e-9, 22e-9, -11e-9, -13e-9, -12e-9, 12e-9, 42e-9, 33e-9, 19e-9, 24e-9, 26e-9, 82e-9, 73e-9, 12e-9, 21e-9, 23e-9, 21e-9, 64e-9, 1e-9}; //initial random weights for load information hidden nodes.
  172.     double w5[] = {11e-9, 12e-9, 1e-9, 21e-9, 3e-9, 4e-9, 31e-9, 12e-9, 16e-9, -14e-9}; //Output layer initial random weights.
  173.     double back_err[10]; //Backerror from the output node stored here for reference of hidden layer.
  174.     double output1[24]; //Store copy of output of input layer for quick access.
  175.     double output2[10]; //Store copy of output of hidden layer for quick access.
  176.     double output; //Output of output layer.
  177.     double target=0.85; //Expected output for input. Used during training. If not training, then set to 0.
  178.     double input[24]={0.34, 0.36, 0.24, 0.32, 0.25, 0.42, 0, 1, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0.32, 0.43, 0.34, 0.52, 0.32, 0.55};
  179.     //double input[24]={0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0};
  180.     //for(i=0;i<24;i++)
  181.         //input[i]*=1000;
  182.     // Order of input:
  183.     //Avg temp(0D), Avg Temp(-1D), Avg Temp(-2D),
  184.     //Hourly Temp(0H), H Temp(-3H), H Temp(-6H), These might be predicted temp...
  185.     //S, M, T, W, T, F, S : 1 for weekdays. 0.5 for weekends.
  186.     //5 bits for hour of day.
  187.     //Load (0H), Load (-2H), Load (-3H),
  188.     //Last year's loads, same as above. Load data has to be prescaled to 0 - 1 scale.
  189.     //Testing code with one input... For final code, make a double dimension array...
  190.    
  191.     //Specify different connections for the hidden layer.
  192.     for(i=0;i<3;i++)
  193.         N_Hid[i].set_w(w1, 0, 5);//Hidden nodes 0 through 2 connected to input nodes 0 through 5.
  194.     printf("\n1");
  195.     N_Hid[3].set_w(w2, 0, 12); //Hidden node 3 connected to input nodes 0 through 12.  
  196.     for(i=4;i<7;i++)
  197.     {
  198.         N_Hid[i].set_w(w3,13,17);//Hidden nodes 4 through 6 connected to input nodes 13 through 17.
  199.         N_Hid[i+3].set_w(w4,0,23);//Hidden nodes 4+3 through 6+3 connected to the whole of input layer.
  200.         printf("\n2");
  201.     }
  202.     N_Out.set_w(w5, 10); //Set output layer weights.
  203.    
  204.     while(target!=0)
  205.     {
  206.         //Calculate outout of input layer.
  207.         for(i=0; i<24; i++)//Input layer.
  208.         {
  209.             N_In[i].get_input(input[i]); //Input given.
  210.             N_In[i].activation(); //Activation function used.
  211.             output1[i]=N_In[i].get_output(); //Get output and store in array.
  212.             printf("\nN_In[%d]=%20.18f",i,output1[i]);
  213.         }
  214.         printf("\n3");
  215.         //Pass to hidden layer.
  216.         //redo: printf("x"); printf("y");
  217.         for(i=0;i<10;i++)
  218.         {
  219.             printf("\n3.5");
  220.             N_Hid[i].activation(output1);//Calculate activation of hidden layer nodes.
  221.             output2[i]=N_Hid[i].get_output();//Store outputs.
  222.             printf("\nN_Hid[%d]=%20.18f",i,output1[i]);
  223.         }
  224.         printf("\n4");
  225.         //On to output layer.
  226.         output = N_Out.activation(output2);
  227.         printf("\nN_Out=%20.18f",output);
  228.         printf("\n5");
  229.         //Training (Or not)...
  230.         if(target!=0)
  231.         {
  232.             if(!((output-target)<0.07&&(output-target)>-0.07)) // Setting tolerance level.
  233.             {
  234.                 //for(i=0;i<10;i++)
  235.                     //N_Hid[i].error_update();
  236.                 printf("\n6");
  237.                 N_Out.error_calc(target, output2, back_err); //Calculate error and backpropagate down to hidden layer.
  238.                 for(i=0;i<10;i++)
  239.                     printf("\nback_err[i]=%20.18f",back_err[i]);
  240.                 N_Out.error_update(); //Change output layer weights.
  241.                 for(i=0;i<11;i++)
  242.                 {
  243.                     printf("\n6.5");
  244.                     N_Hid[i].backProp(back_err[i], output1); //Backpropagation for hidden layer.
  245.                     printf("\n6.51");
  246.                     N_Hid[i].error_update(); //Update Hidden Layer weights.
  247.                     printf("\n6.52"); continue;
  248.                     getch();
  249.                 }//
  250.                 printf("\n7");
  251.                 for(i=0;i<10;i++)
  252.                     N_Hid[i].error_update(); //Update Hidden Layer weights.
  253.                 printf("\n8");
  254.                 //Recalculate output and check again for error.
  255.                 //goto redo;
  256.                 //continue;
  257.             }
  258.             else
  259.                 break;
  260.         }
  261.         //Training done.
  262.         //printf("\nTrained. Output = %20.18f", output);
  263.         //break;
  264.     }
  265.     printf("\nTrained. Output = %20.18f", output);
  266. }
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement