Untitled (jjjj88521, Nov 18th, 2019)

import numpy as np
import matplotlib.pyplot as plt

class Linear:
    def __init__(self, m, n):
        # Fully connected layer mapping m inputs to n outputs.
        # Weights and bias are scaled by 1/m to keep initial activations small.
        self.W, self.b = np.random.randn(m, n) / m, np.random.randn(1, n) / m
        self.dW, self.db = None, None

    def forward(self, x):
        self.x = x                       # cache the input for the backward pass
        return np.dot(x, self.W) + self.b

    def backward(self, dout):
        dx = np.dot(dout, self.W.T)      # gradient w.r.t. the input
        self.dW = np.dot(self.x.T, dout)
        self.db = np.sum(dout, axis=0)
        return dx

class ReLU:
    def forward(self, x):
        # Remember which entries were clipped so backward can zero them too.
        self.mask = (x <= 0)
        out = x.copy()                   # copy so the caller's array is not mutated
        out[self.mask] = 0
        return out

    def backward(self, dout):
        dx = dout.copy()
        dx[self.mask] = 0
        return dx

class Sigmoid:
    def forward(self, x):
        out = 1 / (1 + np.exp(-x))
        self.o = out                     # cache the output for the backward pass
        return out

    def backward(self, dout):
        # d(sigmoid)/dx = sigmoid * (1 - sigmoid)
        return dout * self.o * (1 - self.o)

class Tanh:
    def forward(self, x):
        out = (np.exp(x) - np.exp(-x)) / (np.exp(x) + np.exp(-x))
        self.o = out                     # cache the output for the backward pass
        return out

    def backward(self, dout):
        # d(tanh)/dx = 1 - tanh^2
        return dout * (1 - self.o ** 2)

class Loss:
    def forward(self, y, ybar):
        # Sum-of-squares error; cache both arrays for the backward pass.
        self.y, self.ybar = y, ybar
        return np.sum((y - ybar) ** 2)

    def backward(self, dout):
        # dL/dybar = -2 * (y - ybar), using the cached target rather than a global.
        return dout * (-2 * (self.y - self.ybar))

class TwoLayer:
    # linear -> ReLU -> linear -> sigmoid, trained with momentum SGD.
    def __init__(self, m, n, o):
        self.linear1 = Linear(m, n)
        self.relu = ReLU()
        self.linear2 = Linear(n, o)
        self.sigmoid = Sigmoid()
        self.loss = Loss()
        self.last_dW1, self.last_db1 = 0, 0   # previous momentum steps
        self.last_dW2, self.last_db2 = 0, 0

    def forward(self, x):
        x = self.linear1.forward(x)
        x = self.relu.forward(x)
        x = self.linear2.forward(x)
        self.ybar = self.sigmoid.forward(x)
        return self.ybar

    def backward(self, y):
        self.L = self.loss.forward(y, self.ybar)
        g = self.loss.backward(1)
        g = self.sigmoid.backward(g)
        g = self.linear2.backward(g)
        g = self.relu.backward(g)
        g = self.linear1.backward(g)

    def update(self, eta, alpha):
        # Momentum SGD: step = -eta * grad + alpha * previous step.
        self.last_dW1 = -eta * self.linear1.dW + alpha * self.last_dW1
        self.last_db1 = -eta * self.linear1.db + alpha * self.last_db1
        self.linear1.W += self.last_dW1
        self.linear1.b += self.last_db1
        self.last_dW2 = -eta * self.linear2.dW + alpha * self.last_dW2
        self.last_db2 = -eta * self.linear2.db + alpha * self.last_db2
        self.linear2.W += self.last_dW2
        self.linear2.b += self.last_db2

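# Hedged usage sketch for TwoLayer (the class is not exercised below):
# fit XOR with a 2-4-1 network. The learning rate, momentum, and iteration
# count are illustrative guesses, not values from the original paste, and a
# different random seed may be needed if a hidden unit dies.
xor_x = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
xor_y = np.array([[0], [1], [1], [0]])
xor_net = TwoLayer(2, 4, 1)
for _ in range(5000):
    xor_net.forward(xor_x)
    xor_net.backward(xor_y)
    xor_net.update(0.1, 0.5)
print(xor_net.forward(xor_x).T)   # typically approaches [[0, 1, 1, 0]]
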
class FiveLayer:
    # Each activation gets its own instance because ReLU/Sigmoid cache state
    # in forward that the matching backward call relies on.
    def __init__(self, m, n, o, p, q, r):
        self.linear1 = Linear(m, n)
        self.relu1 = ReLU()
        self.linear2 = Linear(n, o)
        self.sigmoid1 = Sigmoid()
        self.linear3 = Linear(o, p)
        self.relu2 = ReLU()
        self.linear4 = Linear(p, q)
        self.relu3 = ReLU()
        self.linear5 = Linear(q, r)
        self.sigmoid2 = Sigmoid()
        self.loss = Loss()
        self.layers = [self.linear1, self.linear2, self.linear3,
                       self.linear4, self.linear5]
        self.last_dW = [0] * 5            # previous momentum steps per layer
        self.last_db = [0] * 5

    def forward(self, x):
        x = self.relu1.forward(self.linear1.forward(x))
        x = self.sigmoid1.forward(self.linear2.forward(x))
        x = self.relu2.forward(self.linear3.forward(x))
        x = self.relu3.forward(self.linear4.forward(x))
        self.ybar = self.sigmoid2.forward(self.linear5.forward(x))
        return self.ybar

    def backward(self, y):
        self.L = self.loss.forward(y, self.ybar)
        g = self.loss.backward(1)
        g = self.linear5.backward(self.sigmoid2.backward(g))
        g = self.linear4.backward(self.relu3.backward(g))
        g = self.linear3.backward(self.relu2.backward(g))
        g = self.linear2.backward(self.sigmoid1.backward(g))
        g = self.linear1.backward(self.relu1.backward(g))

    def update(self, eta, alpha):
        # Momentum SGD applied layer by layer.
        for i, lin in enumerate(self.layers):
            self.last_dW[i] = -eta * lin.dW + alpha * self.last_dW[i]
            self.last_db[i] = -eta * lin.db + alpha * self.last_db[i]
            lin.W += self.last_dW[i]
            lin.b += self.last_db[i]

class NLayer:
    # Configurable stack: layer i maps neuron[i] -> neuron[i+1] units and is
    # followed by activations[i], one of 'relu', 'sigmoid', or 'tanh'.
    def __init__(self, Neuronin, activations, numofNeurons):
        self.function = []
        self.linear = []
        self.neuron = [Neuronin] + list(numofNeurons)
        self.last_dW = []
        self.last_db = []
        self.loss = Loss()
        self.length = len(activations)

        for i in range(self.length):
            self.linear.append(Linear(self.neuron[i], self.neuron[i + 1]))
            self.last_dW.append(0)
            self.last_db.append(0)
            if activations[i] == 'relu':
                self.function.append(ReLU())
            elif activations[i] == 'sigmoid':
                self.function.append(Sigmoid())
            elif activations[i] == 'tanh':
                self.function.append(Tanh())

    def forward(self, x):
        for i in range(self.length):
            x = self.linear[i].forward(x)
            x = self.function[i].forward(x)
        self.ybar = x
        return self.ybar

    def backward(self, y):
        self.L = self.loss.forward(y, self.ybar)
        g = self.loss.backward(1)
        for i in range(self.length - 1, -1, -1):
            g = self.function[i].backward(g)
            g = self.linear[i].backward(g)

    def update(self, eta, alpha):
        # Momentum SGD, same rule as the fixed-depth networks above.
        for i in range(self.length):
            self.last_dW[i] = -eta * self.linear[i].dW + alpha * self.last_dW[i]
            self.last_db[i] = -eta * self.linear[i].db + alpha * self.last_db[i]
            self.linear[i].W += self.last_dW[i]
            self.linear[i].b += self.last_db[i]

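# Hedged sanity check (not in the original paste): verify one analytic weight
# gradient from Linear against a central finite difference through Loss.
# The shapes and the probed index (1, 2) are arbitrary illustrative choices.
def grad_check(eps=1e-5):
    lin, sq = Linear(4, 3), Loss()
    x = np.random.randn(5, 4)
    t = np.random.randn(5, 3)
    sq.forward(t, lin.forward(x))
    lin.backward(sq.backward(1))         # fills lin.dW analytically
    i, j = 1, 2                          # probe a single weight
    w0 = lin.W[i, j]
    lin.W[i, j] = w0 + eps
    lp = sq.forward(t, lin.forward(x))
    lin.W[i, j] = w0 - eps
    lm = sq.forward(t, lin.forward(x))
    lin.W[i, j] = w0                     # restore the original weight
    print('analytic %.6f vs numeric %.6f' % (lin.dW[i, j], (lp - lm) / (2 * eps)))

grad_check()
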
def graph(lossm, epoch):
    # Plot the recorded training error against the epoch numbers.
    plt.style.use('seaborn-talk')
    plt.figure()
    plt.plot(epoch, lossm, label="Training Error")
    plt.xlabel("Epochs")
    plt.ylabel("Training Error")
    plt.legend()
    plt.show()

# Build the full 8-bit parity dataset: all 256 bit patterns as inputs,
# label 1 for an odd number of set bits, 0 for even.
x1, y1 = [], []
lossm, epoch = [], []                    # training-error history for the plot

for i in range(256):
    bits = '{0:08b}'.format(i)           # zero-padded 8-bit binary string
    x1.append([int(b) for b in bits])
    y1.append([bits.count('1') % 2])     # parity label

X = np.array(x1)
y = np.array(y1)

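# Illustrative label check (assumes X and y above): index 5 is 00000101,
# which has two set bits, even parity, so its label should be 0.
assert list(X[5]) == [0, 0, 0, 0, 0, 1, 0, 1]
assert y[5, 0] == 0
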
# 8 inputs -> 64 (relu) -> 256 (tanh) -> 1 (sigmoid)
model = NLayer(8, ['relu', 'tanh', 'sigmoid'], [64, 256, 1])

max_epochs, chk_epochs = 10000, 1000
eta, alpha = 0.03, 0.6                   # learning rate and momentum
accuracy = 0
for e in range(max_epochs):
    model.forward(X)
    model.backward(y)
    model.update(eta, alpha)
    if (e + 1) % chk_epochs == 0:
        # Record history for the plot and measure thresholded accuracy.
        accuracy = np.mean((model.ybar > 0.5) == y)
        lossm.append(model.L)
        epoch.append(e + 1)
        print('Epoch %3d: loss = %.6f, accuracy = %.3f' % (e + 1, model.L, accuracy))


#print("test data is '['1,1,1,1,0,1,0,0']' ")
model.forward(X)
print(model.ybar.T)                      # final predictions for all 256 inputs
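# Hedged single-input sketch for the pattern in the comment above:
# 1,1,1,1,0,1,0,0 has five set bits, so its parity target is 1.
x_test = np.array([[1, 1, 1, 1, 0, 1, 0, 0]])
p = model.forward(x_test)[0, 0]
print('P(odd parity) = %.4f, predicted class = %d' % (p, p > 0.5))
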
graph(lossm, epoch)