Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- import math
class MLP():
    """A 2-2-1 multilayer perceptron trained with backpropagation to learn
    the logical AND function.

    Network layout:
        inputs : input1, input2
        hidden : neuron1 (w11, w21, b1) and neuron2 (w12, w22, b2)
        output : neuron3 (w31, w32, b3)
    """

    # Hyperparameters (class-level defaults shared by all instances).
    learning_rate = 0.1   # gradient-descent step size
    epoch = 2000          # training sweeps over the 4 input pairs
    display = 100         # print progress every `display` epochs

    # Hand-picked initial weights and biases.
    w11 = 0.5; w12 = 0.9; w21 = 0.4; w22 = 1.0; w31 = -1.2; w32 = 1.1
    b1 = -1; b2 = -1; b3 = -1

    def train(self):
        """Train on all four (input1, input2) combinations for `epoch`
        sweeps, printing the network output every `display` epochs."""
        step = 1
        while step < self.epoch:
            for i in range(0, 4):
                # Decode sample index i into the input pair (i//2, i%2).
                result = self.train_net(int(i / 2), i % 2)
                if step % self.display == 0:
                    print("epoch:", step, "input1:", int(i / 2),
                          "input2:", i % 2, "output:", result)
            step += 1

    def train_net(self, input1, input2):
        """One forward + backward pass for a single training sample.

        Target is `input1 AND input2`. Weights/biases are updated in place
        by plain gradient descent. Returns the output y3 computed BEFORE
        the update takes effect.
        """
        # --- Forward pass ---
        net1 = self.w11 * input1 + self.w21 * input2 + self.b1
        net2 = self.w12 * input1 + self.w22 * input2 + self.b2
        y1 = self.sigmoid(net1)
        y2 = self.sigmoid(net2)
        net3 = y1 * self.w31 + y2 * self.w32 + self.b3
        y3 = self.sigmoid(net3)

        # --- Backward pass: deltas use the sigmoid derivative y*(1-y) ---
        target = int(input1) & int(input2)
        deviation3 = y3 * (1 - y3) * (target - y3)
        deviation1 = self.w31 * deviation3 * y1 * (1 - y1)
        deviation2 = self.w32 * deviation3 * y2 * (1 - y2)

        # Each weight moves by lr * delta * (the input it multiplies).
        # BUG FIX: per net1/net2 above, w12 feeds input1 into neuron2 and
        # w21 feeds input2 into neuron1, so their gradients must use
        # input1 and input2 respectively — the original swapped them.
        self.w11 += self.learning_rate * deviation1 * input1
        self.w12 += self.learning_rate * deviation2 * input1
        self.w21 += self.learning_rate * deviation1 * input2
        self.w22 += self.learning_rate * deviation2 * input2
        self.w31 += self.learning_rate * deviation3 * y1
        self.w32 += self.learning_rate * deviation3 * y2
        self.b1 += self.learning_rate * deviation1
        self.b2 += self.learning_rate * deviation2
        self.b3 += self.learning_rate * deviation3
        return y3

    def sigmoid(self, net):
        """Logistic activation: 1 / (1 + e^-net)."""
        return (1 / (1 + math.exp(-net)))
if __name__ == '__main__':
    # Script entry point: build the network and run the training loop.
    network = MLP()
    network.train()
Add Comment
Please, Sign In to add comment