Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- //--------------------------------------------------------------------------------------------------------------
- //
- // --> w1[0][0]---------> net1[0] > tanh(sum...)-->out1[0]--> w2[0]-------|
- // X -----> w1[0][1]-- | net1[1] |
- // --> w1[0][2]-- | net1[2] ---- " --------out1[2]--> w2[2]-------|
- // ... | ... --> out2 (=tanh(sum..))-- sign()---> +1
- // | w2[N-1] | |
- // --> w1[1][0]------- | --> -1
- // Y -----> w1[1][0]-- | |
- // --> w1[1][0]-- | |
- // ... | |
- // | |-1| --> wb-------------------|
- //                     --> w1[2][0]-------          (for bias)
- // |-1| ----> w1[2][0]--
- // --> w1[2][0]--
- //
- //
- //--------------------------------------------------------------------------------------------------------------
- #include<stdio.h>
- #include<stdlib.h>
- #include<math.h>
- #include<time.h>
- //==========================================//
- #define N 20 // Neurons in Hidden Layer //<<===== Dynamic
- #define maxrounds 30000
- #define tanhd(x) (1-tanh(x)*tanh(x)) //
- double winit = 0.2; // [-;+] //
- double etha = 0.001; //
- //==========================================//
- //---------------------------------------------------//
- double x[1000] = {0};
- double y[1000] = {0};
- double z[1000] = {0};
- //---------------------------------------------------//
- double bias = -1;
- double w1[3][N];
- double w2[N];
- double wb;
- double delta1[N];
- double delta2;
- double out1[N];
- double out2;
- double net1[N];
- double net2;
- // ----------------FUNCTIONS ------------------------//
- double randdouble(double a, double b);
- void read_input();
- void init();
- double forward(double x,double y);
- void plotnet();
- void plotw1w2n();
- double sign(double x);
- void train_net();
- void evaluate();
- // --------------------------------------------------//
// Entry point: read the training samples, randomise the weights,
// train the network, then classify the query points from stdin.
int main(){
    srand((unsigned)time(NULL)); // seed RNG for the random weight init
    read_input();
    init();
    train_net();
    //plotw1w2n();
    evaluate();
    return 0;
} // END MAIN
- // --------------Subfunctions------------------------//
// Debug helper: print tanh'(v) for v in [-10, 10] in steps of 0.2.
void plot_tanhd(){
    for (int step = 0; step < 100; step++) {
        double v = (step - 50) / 5.0;
        printf("%7.3lf : %lf\n", v, tanhd(v));
    }
}
// Read "x,y" pairs from stdin until EOF or parse failure and print the
// network's class prediction ("+1" or "-1") for each point.
void evaluate(){
    double px, py;
    while (scanf("%lf,%lf\n", &px, &py) > 0) {
        if (sign(forward(px, py)) == 1) {
            printf("+1\n");
        } else {
            printf("-1\n");
        }
    }
}
// Uniform random double in [a, b] (granularity limited by RAND_MAX).
double randdouble(double a, double b){
    double u = rand() / (double)RAND_MAX; // u in [0, 1]
    return a + u * (b - a);
}
- void read_input(){
- for(int i = 0; i < 1000; i++){
- scanf("%lf,%lf,%lf\n",&x[i],&y[i],&z[i]);
- if(x[i]==0 && y[i]==0 && z[i]==0)
- break;
- }
- }
- void init(){
- for(int i = 0; i < N; i++){
- w1[0][i] = randdouble(-winit,winit); //for x
- w1[1][i] = randdouble(-winit,winit); //for y
- w1[2][i] = randdouble(-winit,winit); //for bias
- w2[i] = randdouble(-winit,winit); // for n[i]
- }
- wb = randdouble(-winit,winit); //for bias
- }
- double forward(double x,double y){
- //printf("x:%lf y:%lf\n",x,y);
- for(int i = 0; i < N; i++){
- net1[i] = ( w1[0][i]*x + w1[1][i]*y + w1[2][i]*bias );
- out1[i] = tanh(net1[i]);
- // printf("n1 %lf,",n[i]);
- }
- net2 = 0; // then sum all together
- for(int i = 0; i < N; i++){
- net2 = net2 + w2[i]*out1[i];
- }
- net2 = net2 + wb*bias; // sumation finished
- out2 = tanh(net2); // linear or tanh ???
- return out2;
- }
// Debug helper: render the decision boundary as an ASCII map of the
// region [-10, 10] x [-10, 10]; the axes are marked at 0, +/-1, +/-5.
void plotnet(){
    for (int row = 50; row >= -50; row--) {
        double py = row / 5.0;
        for (int col = -50; col <= 50; col++) {
            double px = col / 5.0;
            char mark;
            if (px == 0 && py == 0)
                mark = '0';                 // origin
            else if (py == 0 && (px == 1 || px == -1))
                mark = '1';                 // x-axis unit ticks
            else if (py == 0 && (px == 5 || px == -5))
                mark = '5';
            else if (px == 0 && (py == 5 || py == -5))
                mark = '5';
            else if (px == 0 && (py == 1 || py == -1))
                mark = '1';                 // y-axis unit ticks
            else
                mark = (sign(forward(px, py)) == 1) ? '+' : '-';
            printf("%c", mark);
        }
        printf("\n");
    }
}
- void plotw1w2n(){
- for(int i = 0; i < N; i++){
- printf("w1x:%lf, w1y:%lf, w1b:%lf, w2:%lf, n:%lf\n",w1[0][i],w1[1][i],w1[2][i],w2[i],net1[i]);
- }
- }
// Classification sign: +1.0 for non-negative values, -1.0 for negative
// (note: sign(0) is deliberately +1).
double sign(double val){
    return (val >= 0) ? 1.0 : -1.0;
}
- void train_net(){
- int count = 0;
- int flag;
- while(1){
- //printf("w1[0][0], w1[1][0], w1[2][0], w2[0], n[0]\n");
- //printf("%8.4lf %8.4lf %8.4lf %8.4lf %8.4lf INIT\n",w1[0][0],w1[1][0],w1[2][0],w2[0],n[0]);
- flag = 1;
- for(int i = 0; (x[i]!=0) && (y[i]!=0) && (z[i]!=0) ; i++){
- double tempout2 = forward(x[i],y[i]); // global out1[n], out2, net1[n], net2 are changed
- if( 1){
- flag = 0;
- delta2 = z[i] - tempout2; // expected - current
- for(int k = 0; k < N; k++){
- w2[k] = w2[k] + etha * out1[k] * tanhd(net2) * delta2;
- }
- //input to hidden
- for(int k = 0; k < N; k++){
- delta1[k] = delta2 * w2[k]; //da layer 2 nur 1 neuron
- w1[0][k] = w1[0][k] + etha * x[i] * ( tanhd(net1[k]) ) * delta1[k];
- w1[1][k] = w1[1][k] + etha * y[i] * ( tanhd(net1[k]) ) * delta1[k];
- w1[2][k] = w1[2][k] + etha * bias * ( tanhd(net1[k]) ) * delta1[k];
- }
- //printf("%8.4lf %8.4lf %8.4lf %8.4lf %8.4lf\n",w1[0][k],w1[1][k],w1[2][k],w2[k],n[k]);
- }
- }
- if(flag == 1)
- break;
- if(count == maxrounds)
- break;
- count++;
- }
- //printf("count: %i\n",count);
- }
Add Comment
Please, Sign In to add comment