import numpy as np
import pandas as pd

## reading in data and cleaning it
myData = pd.read_csv("a2-train-data.txt", header = None,
                     delimiter = " ")
labels = pd.read_csv("a2-train-label.txt", header = None,
                     delimiter = " ").values
# drop the extra 1001st column (likely an empty column from a trailing delimiter)
myData.drop([1000], axis = 1, inplace = True)
myData = myData.values

class DL(object) :

    def __init__(self, trainingSet, labels, nNodes, epoch = 20, eta = 0.1) :
        self.trainingSet = trainingSet
        self.labels = labels
        self.nrow = trainingSet.shape[0]
        self.ncol = trainingSet.shape[1]
        # one hidden layer with nNodes ReLU units and a single sigmoid output unit
        self.bias1 = np.random.randn(nNodes)
        self.bias2 = np.random.randn(1)
        self.inToHidden = np.random.randn(self.ncol, nNodes)
        self.hiddenToOutput = np.random.randn(nNodes, 1)
        self.epoch = epoch
        self.eta = eta
    def relu(self, x) :
        return max(0, x)

    def deriRelu(self, x) :
        if x > 0 :
            return 1
        else :
            # covers x == 0 as well, so the vectorized call never returns None
            return 0
    def sigmoid(self, x) :
        x = np.clip(x, -500, 500)  # clip to keep np.exp from overflowing
        return 1 / (1 + np.exp(-x))

    def deriSigmoid(self, x) :
        # x is already a sigmoid activation, so the derivative is x * (1 - x)
        return x * (1 - x)

    def squaredError(self, yHat, y) :
        return np.sum(np.subtract(yHat, y) ** 2) / 2

    def deriSquaredError(self, yHat, y) :
        return yHat - y

    def crossEntropy(self, yHat, y) :
        return (np.sum(-(y * np.log(yHat) + (1 - y) * np.log(1 - yHat)))) / yHat.shape[0]
    def normalize(self) :
        # standardize each sample (row) to zero mean and unit variance
        self.trainingSet = np.apply_along_axis(
                lambda x : (x - np.mean(x)) / np.std(x), 1, self.trainingSet)
    def fit(self) :
        relu = np.vectorize(self.relu, otypes = [np.float64])
        sigmoid = np.vectorize(self.sigmoid, otypes = [np.float64])
        deriRelu = np.vectorize(self.deriRelu, otypes = [np.float64])
        deriSigmoid = np.vectorize(self.deriSigmoid, otypes = [np.float64])
        for epoch in range(self.epoch) :
            # visit every training sample once per epoch, in a random order
            executedOrder = np.random.choice(np.arange(self.nrow), self.nrow, False)
            for runningIndex in executedOrder :
                # forward propagate
                # first layer
                hiddenLayerOutput = np.dot(self.trainingSet[runningIndex,], self.inToHidden) + self.bias1
                hiddenLayerActi = relu(hiddenLayerOutput)

                # second layer
                outputLayerOutput = np.dot(hiddenLayerActi, self.hiddenToOutput) + self.bias2
                outputLayerActi = sigmoid(outputLayerOutput)

                # back propagate
                holdingPart = np.subtract(outputLayerActi, self.labels[runningIndex]) \
                              * deriSigmoid(outputLayerActi)
                # reshape to a column vector so the update matches hiddenToOutput's shape
                hiddenToOutputUpdate = (holdingPart * hiddenLayerActi).reshape(-1, 1)
                bias2Update = holdingPart
                holdingPart2 = np.dot(self.hiddenToOutput, holdingPart) \
                               * deriRelu(hiddenLayerActi)
                bias1Update = holdingPart2
                inputToHiddenUpdate = np.outer(self.trainingSet[runningIndex], holdingPart2)

                # weights update (one stochastic gradient descent step)
                self.inToHidden = self.inToHidden - self.eta * inputToHiddenUpdate
                self.bias1 = self.bias1 - self.eta * bias1Update
                self.hiddenToOutput = self.hiddenToOutput - self.eta * hiddenToOutputUpdate
                self.bias2 = self.bias2 - self.eta * bias2Update
            # full-batch forward pass to report the squared error after each epoch
            Layer1 = np.dot(self.trainingSet, self.inToHidden) + self.bias1
            actiLayer1 = relu(Layer1)
            Layer2 = np.dot(actiLayer1, self.hiddenToOutput) + self.bias2
            actiLayer2 = sigmoid(Layer2)
            totalError = np.sum(np.subtract(actiLayer2, self.labels) ** 2) / 2
            print(f"The total error at {epoch} epoch is {totalError}")
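
# How the network was actually trained is not shown in the paste; this is a
# minimal sketch of a plausible run. nNodes = 50 and eta = 0.1 are illustrative
# assumptions; epoch = 100 matches the 100 error lines logged below.
network = DL(myData, labels, nNodes = 50, epoch = 100, eta = 0.1)
network.fit()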
'''
The total error at 0 epoch is 67650.48391623204
The total error at 1 epoch is 67582.49720719726
The total error at 2 epoch is 67558.90366369426
The total error at 3 epoch is 67546.03179988083
The total error at 4 epoch is 67538.2747893663
The total error at 5 epoch is 67532.79780871204
The total error at 6 epoch is 67528.8413158413
The total error at 7 epoch is 67525.76611936158
The total error at 8 epoch is 67523.32145181064
The total error at 9 epoch is 67521.32934587132
The total error at 10 epoch is 67519.6689200652
The total error at 11 epoch is 67518.25611263084
The total error at 12 epoch is 67517.04966654231
The total error at 13 epoch is 67516.00258507676
The total error at 14 epoch is 67515.07585268158
The total error at 15 epoch is 67514.2592940233
The total error at 16 epoch is 67513.53345041646
The total error at 17 epoch is 67512.87758371508
The total error at 18 epoch is 67512.28413783187
The total error at 19 epoch is 67511.74679686771
The total error at 20 epoch is 67511.25654914613
The total error at 21 epoch is 67510.80672621084
The total error at 22 epoch is 67510.39371369651
The total error at 23 epoch is 67510.01247291012
The total error at 24 epoch is 67509.6583550539
The total error at 25 epoch is 67509.33020416187
The total error at 26 epoch is 67509.02386520518
The total error at 27 epoch is 67508.73746719363
The total error at 28 epoch is 67508.46993419493
The total error at 29 epoch is 67508.21885968384
The total error at 30 epoch is 67507.98237792609
The total error at 31 epoch is 67507.75955316664
The total error at 32 epoch is 67507.54951460163
The total error at 33 epoch is 67507.35090217054
The total error at 34 epoch is 67507.16248081246
The total error at 35 epoch is 67506.98359843464
The total error at 36 epoch is 67506.81375374572
The total error at 37 epoch is 67506.65232726662
The total error at 38 epoch is 67506.49872842756
The total error at 39 epoch is 67506.35210005984
The total error at 40 epoch is 67506.2120777713
The total error at 41 epoch is 67506.07838193102
The total error at 42 epoch is 67505.95038984748
The total error at 43 epoch is 67505.82791390341
The total error at 44 epoch is 67505.71049306939
The total error at 45 epoch is 67505.59771200454
The total error at 46 epoch is 67505.4895443541
The total error at 47 epoch is 67505.38553033186
The total error at 48 epoch is 67505.28545585572
The total error at 49 epoch is 67505.18912281428
The total error at 50 epoch is 67505.09631462146
The total error at 51 epoch is 67505.00689552687
The total error at 52 epoch is 67504.92056531263
The total error at 53 epoch is 67504.8372545194
The total error at 54 epoch is 67504.756749623
The total error at 55 epoch is 67504.67896018295
The total error at 56 epoch is 67504.60371728735
The total error at 57 epoch is 67504.53093182023
The total error at 58 epoch is 67504.46046890262
The total error at 59 epoch is 67504.39219765665
The total error at 60 epoch is 67504.32603372834
The total error at 61 epoch is 67504.26185004266
The total error at 62 epoch is 67504.19961364011
The total error at 63 epoch is 67504.13920209499
The total error at 64 epoch is 67504.08053386342
The total error at 65 epoch is 67504.02352792272
The total error at 66 epoch is 67503.96811677932
The total error at 67 epoch is 67503.91424207833
The total error at 68 epoch is 67503.86183298097
The total error at 69 epoch is 67503.81083092757
The total error at 70 epoch is 67503.76118495633
The total error at 71 epoch is 67503.71284220462
The total error at 72 epoch is 67503.6657494073
The total error at 73 epoch is 67503.61985049216
The total error at 74 epoch is 67503.5751102512
The total error at 75 epoch is 67503.53148868076
The total error at 76 epoch is 67503.48892664777
The total error at 77 epoch is 67503.44740012265
The total error at 78 epoch is 67503.40685993138
The total error at 79 epoch is 67503.36727771621
The total error at 80 epoch is 67503.3286158604
The total error at 81 epoch is 67503.29085785548
The total error at 82 epoch is 67503.25394428574
The total error at 83 epoch is 67503.21787410666
The total error at 84 epoch is 67503.18259708218
The total error at 85 epoch is 67503.14809995562
The total error at 86 epoch is 67503.11435593311
The total error at 87 epoch is 67503.08133753107
The total error at 88 epoch is 67503.0490222797
The total error at 89 epoch is 67503.01739234959
The total error at 90 epoch is 67502.98641884349
The total error at 91 epoch is 67502.95608547877
The total error at 92 epoch is 67502.92636913632
The total error at 93 epoch is 67502.89725933937
The total error at 94 epoch is 67502.8687259568
The total error at 95 epoch is 67502.84075651446
The total error at 96 epoch is 67502.8133341945
The total error at 97 epoch is 67502.78644901175
The total error at 98 epoch is 67502.76007181007
The total error at 99 epoch is 67502.73419589132
'''
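
# The paste stops after training; reading predictions back off the trained
# weights is not shown. A hedged sketch of that forward pass (the 0/1 label
# encoding and the 0.5 threshold are assumptions, not from the original):
hidden = np.maximum(0, np.dot(myData, network.inToHidden) + network.bias1)
scores = network.sigmoid(np.dot(hidden, network.hiddenToOutput) + network.bias2)
predictions = (scores >= 0.5).astype(int)  # one 0/1 prediction per training row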