#include <cstdio> //printf is used throughout for debug traces.
#include <cmath>  //exp for the sigmoid activation.
using namespace std;
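/*
 A feedforward neural network for short-term load forecasting, trained by
 backpropagation: 24 input nodes, 10 hidden layer nodes and 1 output node.
 Every node applies a sigmoid activation, and training uses the delta rule
 with learning constant c = 0.5 until the output is within 0.07 of the target.
*/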
class NodeOut
{
private:
    const double lambda=1; //For Activation Function.
    const double c=0.5;    //Learning Constant.
    double w[10], del_w[10], out, error, sum;
    int nodes_lim;
public:
    NodeOut(): out(0), error(0), sum(0), nodes_lim(0)
    {
        for(int i=0;i<10;i++)
        {
            w[i]=del_w[i]=0;
        }
    }
    //To set the weights and record the number of hidden layer nodes connected to this node.
    void set_w(double weight[], int num_nodes)
    {
        nodes_lim = num_nodes;
        for(int i=0; i<nodes_lim; i++)
        {
            w[i]=weight[i];
        }
    }
    //Calculate the weighted sum of the inputs (the hidden layer's outputs).
    double activation(double x[]) //The output of the node is returned.
    {
        sum=0; //Reset the accumulator; stale sums from earlier passes would otherwise carry over.
        for(int i=0;i<nodes_lim;i++)
        {
            sum+=w[i]*x[i];
        }
        out=1/(1+exp(-1*lambda*(sum+1))); //Sigmoid with a fixed +1 bias term.
        return out;
    }
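    /*
     The error term below is the delta rule for a sigmoid output node: since
     d(out)/d(sum) = out*(1-out), the local gradient is out*(1-out)*(target-out),
     and each weight moves by c * gradient * input.
    */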
    void error_calc(double target, double out_hid[], double back_err[]) //back_err is filled in for the caller.
    {
        error=out*(1-out)*(target-out); //Calculate the error at this node.
        for(int i=0; i<nodes_lim; i++)
        {
            del_w[i] = c*error*out_hid[i]; //Calculate the increment to each input weight.
            back_err[i]=w[i]*error;        //Backpropagate the error to the previous layer.
        }
    }
    void error_update()
    {
        for(int i=0;i<nodes_lim;i++)
            w[i]+=del_w[i];
    }
};
/*
 Each hidden layer node is represented by an object of the class NodeH.
 A hidden layer node stores all of its input weights locally.
 To keep track of the nodes, each layer is numbered from 0 upwards, left to right:
 the input layer nodes run from 0 at the extreme left to 23 at the extreme right,
 and the hidden layer nodes likewise run from 0 to 9.
*/
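//Example: in main(), N_Hid[3].set_w(w2, 0, 12) connects hidden node 3 to input nodes 0 through 12.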
class NodeH
{
private:
    const double lambda=1; //For Activation Function.
    const double c=0.5;    //Learning Constant.
    double w[24], del_w[24], out, error, sum; //Sized 24: hidden nodes 7-9 connect to all 24 inputs, so 20 slots would overflow.
    int start, stop; //Indices of the first and last input layer nodes connected to this node.
public:
    NodeH(): out(0), error(0), sum(0), start(0), stop(0)
    {
        for(int i=0;i<24;i++)
        {
            w[i]=0; del_w[i]=0;
        }
    }
    void set_w(double weight[],int start_node, int stop_node) //Set the node's input weights as per requirement.
    {
        start=start_node; stop=stop_node;
        for(int i=start; i<=stop; i++)
        {
            w[i]=weight[i-start]; //weight[] holds stop-start+1 values, so offset the index; weight[i] would read past w3's 5 entries.
        }
        printf("\nstart=%d and stop=%d",start, stop);
    }
    //Calculate the sum at this node from the locally available input.
    void activation(double x[]) //x is the complete output of the input layer nodes.
    {
        sum=0; //Reset before accumulating, as in NodeOut.
        for(int i=start; i<=stop; i++)
        {
            sum+=w[i]*x[i];
        }
        out = 1/(1+exp(-1*lambda*(sum+1))); //Output using the sigmoid function.
    }
    double get_output()
    {
        return out;
    }
    //The output node's job is to multiply its error with each input weight and pass the product on to the corresponding
    //hidden layer node. The hidden layer node's job is to accept every such back-propagated error from the output layer
    //nodes (in our case there is only one output layer node) and, by summing, form the total back-propagated error.
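    //Worked example with illustrative numbers: if out=0.8, back_prop_err=0.1 and x[i]=0.5,
    //then error = 0.8*0.2*0.1 = 0.016 and del_w[i] = 0.5*0.016*0.5 = 0.004.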
    void backProp(double back_prop_err, double x[]) //x is the complete output of the input layer neurons.
    {
        error = out*(1-out)*back_prop_err;
        printf("\nB:%20.18f",error);
        for(int i=start; i<=stop; i++)
        {
            del_w[i] = c*error*x[i];
            printf("\ndel_w[%d]=%20.18f",i,del_w[i]);
        }
    }
    void error_update()
    {
        for(int i=start; i<=stop; i++)
            w[i]+=del_w[i];
    }
};
/*
 NodeIn is the class for objects of the input layer. Each node has only one input.
*/
class NodeIn
{
private:
    double in, out; //main() collects the output of each node and forms an array.
    const double lambda=1;
public:
    NodeIn(): in(0), out(0) {}
    void get_input(double input)
    {
        in = input;
    }
    double activation()
    {
        out=1/(1+exp(-1*lambda*(in+1)));
        return out; //The function is declared double, so the value must actually be returned.
    }
    double get_output()
    {
        return out;
    }
};
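//Note: the input layer also applies the sigmoid, so the raw 0-1 inputs are squashed before reaching the hidden layer.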
int main()
{
    int i;
    NodeIn N_In[24];  //24 input nodes.
    NodeH N_Hid[10];  //10 hidden layer nodes.
    NodeOut N_Out;    //One output node.
    double w1[] = {1e-1, 5e-1, -1e-1, 69e-1, 4e-1, 1e-1}; //Initial random weights for the micro1 hidden layer nodes.
    double w2[] = {5e-9, 23e-9, 55e-9, 36e-9, 45e-9, 74e-9, 11e-9, 12e-9, 11e-9, 63e-9, -16e-9, 4e-9, 5e-9}; //Initial random weights for the micro2 hidden layer nodes.
    double w3[] = {74e-9, 12e-9, 69e-9, -13e-9, -12e-9}; //Initial random weights for the hour-information hidden nodes.
    double w4[] = {11e-9, 22e-9, 45e-9, 82e-9, 3e-9, 1e-9, 22e-9, -11e-9, -13e-9, -12e-9, 12e-9, 42e-9, 33e-9, 19e-9, 24e-9, 26e-9, 82e-9, 73e-9, 12e-9, 21e-9, 23e-9, 21e-9, 64e-9, 1e-9}; //Initial random weights for the load-information hidden nodes.
    double w5[] = {11e-9, 12e-9, 1e-9, 21e-9, 3e-9, 4e-9, 31e-9, 12e-9, 16e-9, -14e-9}; //Output layer initial random weights.
    double back_err[10]; //Back-error from the output node, stored here for the hidden layer's reference.
    double output1[24];  //Copy of the input layer's output for quick access.
    double output2[10];  //Copy of the hidden layer's output for quick access.
    double output=0;     //Output of the output layer; initialised in case the training loop never runs.
    double target=0.85;  //Expected output for this input, used during training. If not training, set it to 0.
    double input[24]={0.34, 0.36, 0.24, 0.32, 0.25, 0.42, 0, 1, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0.32, 0.43, 0.34, 0.52, 0.32, 0.55};
    //double input[24]={0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0};
    //for(i=0;i<24;i++)
    //    input[i]*=1000;
    /*
     Order of input (24 values):
       [0..2]   Avg temp (0D), Avg temp (-1D), Avg temp (-2D)
       [3..5]   Hourly temp (0H), (-3H), (-6H) -- these might be predicted temperatures
       [6..12]  S, M, T, W, T, F, S: 1 for weekdays, 0.5 for weekends
       [13..17] 5 bits for the hour of day
       [18..20] Load (0H), Load (-2H), Load (-3H)
       [21..23] Last year's loads, same pattern; load data has to be prescaled to the 0-1 range.
     Testing the code with one input; for the final code, make a two-dimensional array.
    */
    //Specify the different connections for the hidden layer.
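    //Topology summary, as wired below:
    //  N_Hid[0..2] <- inputs 0..5   (weights w1)
    //  N_Hid[3]    <- inputs 0..12  (weights w2)
    //  N_Hid[4..6] <- inputs 13..17 (weights w3)
    //  N_Hid[7..9] <- inputs 0..23  (weights w4)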
    for(i=0;i<3;i++)
        N_Hid[i].set_w(w1, 0, 5); //Hidden nodes 0 through 2 connected to input nodes 0 through 5.
    N_Hid[3].set_w(w2, 0, 12);    //Hidden node 3 connected to input nodes 0 through 12.
    for(i=4;i<7;i++)
    {
        N_Hid[i].set_w(w3,13,17);  //Hidden nodes 4 through 6 connected to input nodes 13 through 17.
        N_Hid[i+3].set_w(w4,0,23); //Hidden nodes 7 through 9 connected to the whole of the input layer.
    }
    N_Out.set_w(w5, 10); //Set the output layer weights.
    while(target!=0)
    {
        //Calculate the output of the input layer.
        for(i=0; i<24; i++)
        {
            N_In[i].get_input(input[i]);     //Input given.
            N_In[i].activation();            //Activation function used.
            output1[i]=N_In[i].get_output(); //Get the output and store it in the array.
            printf("\nN_In[%d]=%20.18f",i,output1[i]);
        }
        //Pass to the hidden layer.
        for(i=0;i<10;i++)
        {
            N_Hid[i].activation(output1);     //Calculate the activation of the hidden layer nodes.
            output2[i]=N_Hid[i].get_output(); //Store the outputs.
            printf("\nN_Hid[%d]=%20.18f",i,output2[i]); //Print the hidden layer output, not output1.
        }
        //On to the output layer.
        output = N_Out.activation(output2);
        printf("\nN_Out=%20.18f",output);
        //Training (or not)...
        if(target!=0)
        {
            if(!((output-target)<0.07&&(output-target)>-0.07)) //Tolerance level of +/-0.07.
            {
                N_Out.error_calc(target, output2, back_err); //Calculate the error and backpropagate it down to the hidden layer.
                for(i=0;i<10;i++)
                    printf("\nback_err[%d]=%20.18f",i,back_err[i]);
                N_Out.error_update(); //Change the output layer weights.
                for(i=0;i<10;i++) //10 hidden nodes: looping to 11 would read past back_err[] and N_Hid[].
                    N_Hid[i].backProp(back_err[i], output1); //Backpropagation for the hidden layer.
                for(i=0;i<10;i++)
                    N_Hid[i].error_update(); //Update the hidden layer weights, once per pass.
                //Recalculate the output and check the error again.
            }
            else
                break;
        }
        //Training done.
    }
- printf("\nTrained. Output = %20.18f", output);
- }
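//Build note (a sketch, assuming a C++11 toolchain such as g++; the file name is illustrative):
//  g++ -std=c++11 load_forecast.cpp -o load_forecast && ./load_forecast
//C++11 or later is needed for the in-class member initialisers (const double lambda=1;).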