Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- import numpy as np
- import pandas as pd
# --- Read the training data and labels, then strip the artifact column ---
myData = pd.read_csv("a2-train-data.txt", header=None, delimiter=" ")
# Column 1000 comes from the trailing delimiter on each line; drop it
# before converting to a plain numpy array.
myData.drop([1000], axis=1, inplace=True)
myData = myData.values
labels = pd.read_csv("a2-train-label.txt", header=None, delimiter=" ").values
class DL(object):
    """Single-hidden-layer neural network: ReLU hidden layer, sigmoid output.

    Trained with per-sample stochastic gradient descent on the squared-error
    loss.  Weights and biases are initialised from a standard normal.

    Parameters
    ----------
    trainingSet : 2-D array of shape (n_samples, n_features).
    labels      : target values; flattened to 1-D so that a (1, n) row
                  vector (as produced by pd.read_csv(...).values) still
                  indexes one scalar target per sample.
    nNodes      : number of hidden units.
    epoch       : number of passes over the training set (default 20).
    eta         : learning rate (default 0.1).
    """

    def __init__(self, trainingSet, labels, nNodes, epoch=20, eta=0.1):
        self.trainingSet = trainingSet
        # Bug fix: flatten so self.labels[i] is the i-th scalar target even
        # when labels arrive as a 2-D (1, n) array.
        self.labels = np.asarray(labels).ravel()
        self.nrow = trainingSet.shape[0]  # number of samples
        self.ncol = trainingSet.shape[1]  # number of features
        self.bias1 = np.random.randn(nNodes)                  # hidden bias, (nNodes,)
        self.bias2 = np.random.randn(1)                       # output bias, (1,)
        self.inToHidden = np.random.randn(self.ncol, nNodes)  # (ncol, nNodes)
        self.hiddenToOutput = np.random.randn(nNodes, 1)      # (nNodes, 1)
        self.epoch = epoch
        self.eta = eta

    def relu(self, x):
        """ReLU on a scalar: max(0, x)."""
        return max(0, x)

    def deriRelu(self, x):
        """Derivative of ReLU on a scalar.

        Bug fix: the original handled only x > 0 and x < 0 and returned
        None at x == 0, which np.vectorize(..., otypes=[float64]) turns
        into a crash.  The subgradient at 0 is taken as 0.
        """
        return 1.0 if x > 0 else 0.0

    def sigmoid(self, x):
        """Numerically safe logistic function (input clipped to +/-500)."""
        x = np.clip(x, -500, 500)
        return 1 / (1 + np.exp(-x))

    def deriSigmoid(self, x):
        """Derivative of the sigmoid expressed in terms of its OUTPUT x."""
        return x * (1 - x)

    def squaredError(self, yHat, y):
        """Half the sum of squared residuals."""
        return np.sum(np.subtract(yHat, y) ** 2) / 2

    def deriSquaredError(self, yHat, y):
        """Gradient of squaredError with respect to yHat."""
        return yHat - y

    def crossEntropy(self, yHat, y):
        """Mean binary cross-entropy.

        Bug fix: the original divided by ``y.Hat.shape[0]`` (an
        AttributeError typo); corrected to ``yHat.shape[0]``.
        """
        return np.sum(-(y * np.log(yHat) + (1 - y) * np.log(1 - yHat))) / yHat.shape[0]

    def normalize(self):
        """Standardize each sample (row) to zero mean and unit variance.

        NOTE(review): a constant row has std 0 and produces NaNs — confirm
        the input data cannot contain one.
        """
        self.trainingSet = np.apply_along_axis(
            lambda x: (x - np.mean(x)) / np.std(x), 1, self.trainingSet)

    def fit(self):
        """Train with per-sample SGD; prints total squared error per epoch."""
        # Vectorized wrappers so the scalar activations apply elementwise.
        relu = np.vectorize(self.relu, otypes=[np.float64])
        sigmoid = np.vectorize(self.sigmoid, otypes=[np.float64])
        deriRelu = np.vectorize(self.deriRelu, otypes=[np.float64])
        deriSigmoid = np.vectorize(self.deriSigmoid, otypes=[np.float64])
        for epoch in range(self.epoch):
            # Visit every sample exactly once per epoch, in random order.
            executedOrder = np.random.choice(np.arange(self.nrow), self.nrow, False)
            for runningIndex in executedOrder:
                x = self.trainingSet[runningIndex]
                # --- forward pass ---
                hiddenLayerActi = relu(np.dot(x, self.inToHidden) + self.bias1)  # (nNodes,)
                outputLayerActi = sigmoid(
                    np.dot(hiddenLayerActi, self.hiddenToOutput) + self.bias2)   # (1,)
                # --- backward pass ---
                # dE/d(output pre-activation), shape (1,)
                holdingPart = (outputLayerActi - self.labels[runningIndex]) \
                    * deriSigmoid(outputLayerActi)
                # Bug fix: np.outer yields a (nNodes, 1) update matching
                # self.hiddenToOutput; the original's 1-D product made the
                # weight update broadcast to (nNodes, nNodes).
                hiddenToOutputUpdate = np.outer(hiddenLayerActi, holdingPart)
                bias2Update = holdingPart
                # Error propagated back to the hidden layer, shape (nNodes,)
                holdingPart2 = np.dot(self.hiddenToOutput, holdingPart) \
                    * deriRelu(hiddenLayerActi)
                bias1Update = holdingPart2
                inputToHiddenUpdate = np.outer(x, holdingPart2)
                # --- gradient-descent updates ---
                self.inToHidden = self.inToHidden - self.eta * inputToHiddenUpdate
                self.bias1 = self.bias1 - self.eta * bias1Update
                self.hiddenToOutput = self.hiddenToOutput - self.eta * hiddenToOutputUpdate
                self.bias2 = self.bias2 - self.eta * bias2Update
            # Epoch diagnostics: full-batch forward pass and total squared error.
            actiLayer1 = relu(np.dot(self.trainingSet, self.inToHidden) + self.bias1)
            actiLayer2 = sigmoid(np.dot(actiLayer1, self.hiddenToOutput) + self.bias2)
            # Bug fix: ravel the (nrow, 1) predictions before subtracting the
            # 1-D labels; the original broadcast to an (nrow, nrow) matrix.
            totalError = np.sum((actiLayer2.ravel() - self.labels) ** 2) / 2
            print(f"The total error at {epoch} epoch is {totalError}")
- '''
- The total error at 0 epoch is 67650.48391623204
- The total error at 1 epoch is 67582.49720719726
- The total error at 2 epoch is 67558.90366369426
- The total error at 3 epoch is 67546.03179988083
- The total error at 4 epoch is 67538.2747893663
- The total error at 5 epoch is 67532.79780871204
- The total error at 6 epoch is 67528.8413158413
- The total error at 7 epoch is 67525.76611936158
- The total error at 8 epoch is 67523.32145181064
- The total error at 9 epoch is 67521.32934587132
- The total error at 10 epoch is 67519.6689200652
- The total error at 11 epoch is 67518.25611263084
- The total error at 12 epoch is 67517.04966654231
- The total error at 13 epoch is 67516.00258507676
- The total error at 14 epoch is 67515.07585268158
- The total error at 15 epoch is 67514.2592940233
- The total error at 16 epoch is 67513.53345041646
- The total error at 17 epoch is 67512.87758371508
- The total error at 18 epoch is 67512.28413783187
- The total error at 19 epoch is 67511.74679686771
- The total error at 20 epoch is 67511.25654914613
- The total error at 21 epoch is 67510.80672621084
- The total error at 22 epoch is 67510.39371369651
- The total error at 23 epoch is 67510.01247291012
- The total error at 24 epoch is 67509.6583550539
- The total error at 25 epoch is 67509.33020416187
- The total error at 26 epoch is 67509.02386520518
- The total error at 27 epoch is 67508.73746719363
- The total error at 28 epoch is 67508.46993419493
- The total error at 29 epoch is 67508.21885968384
- The total error at 30 epoch is 67507.98237792609
- The total error at 31 epoch is 67507.75955316664
- The total error at 32 epoch is 67507.54951460163
- The total error at 33 epoch is 67507.35090217054
- The total error at 34 epoch is 67507.16248081246
- The total error at 35 epoch is 67506.98359843464
- The total error at 36 epoch is 67506.81375374572
- The total error at 37 epoch is 67506.65232726662
- The total error at 38 epoch is 67506.49872842756
- The total error at 39 epoch is 67506.35210005984
- The total error at 40 epoch is 67506.2120777713
- The total error at 41 epoch is 67506.07838193102
- The total error at 42 epoch is 67505.95038984748
- The total error at 43 epoch is 67505.82791390341
- The total error at 44 epoch is 67505.71049306939
- The total error at 45 epoch is 67505.59771200454
- The total error at 46 epoch is 67505.4895443541
- The total error at 47 epoch is 67505.38553033186
- The total error at 48 epoch is 67505.28545585572
- The total error at 49 epoch is 67505.18912281428
- The total error at 50 epoch is 67505.09631462146
- The total error at 51 epoch is 67505.00689552687
- The total error at 52 epoch is 67504.92056531263
- The total error at 53 epoch is 67504.8372545194
- The total error at 54 epoch is 67504.756749623
- The total error at 55 epoch is 67504.67896018295
- The total error at 56 epoch is 67504.60371728735
- The total error at 57 epoch is 67504.53093182023
- The total error at 58 epoch is 67504.46046890262
- The total error at 59 epoch is 67504.39219765665
- The total error at 60 epoch is 67504.32603372834
- The total error at 61 epoch is 67504.26185004266
- The total error at 62 epoch is 67504.19961364011
- The total error at 63 epoch is 67504.13920209499
- The total error at 64 epoch is 67504.08053386342
- The total error at 65 epoch is 67504.02352792272
- The total error at 66 epoch is 67503.96811677932
- The total error at 67 epoch is 67503.91424207833
- The total error at 68 epoch is 67503.86183298097
- The total error at 69 epoch is 67503.81083092757
- The total error at 70 epoch is 67503.76118495633
- The total error at 71 epoch is 67503.71284220462
- The total error at 72 epoch is 67503.6657494073
- The total error at 73 epoch is 67503.61985049216
- The total error at 74 epoch is 67503.5751102512
- The total error at 75 epoch is 67503.53148868076
- The total error at 76 epoch is 67503.48892664777
- The total error at 77 epoch is 67503.44740012265
- The total error at 78 epoch is 67503.40685993138
- The total error at 79 epoch is 67503.36727771621
- The total error at 80 epoch is 67503.3286158604
- The total error at 81 epoch is 67503.29085785548
- The total error at 82 epoch is 67503.25394428574
- The total error at 83 epoch is 67503.21787410666
- The total error at 84 epoch is 67503.18259708218
- The total error at 85 epoch is 67503.14809995562
- The total error at 86 epoch is 67503.11435593311
- The total error at 87 epoch is 67503.08133753107
- The total error at 88 epoch is 67503.0490222797
- The total error at 89 epoch is 67503.01739234959
- The total error at 90 epoch is 67502.98641884349
- The total error at 91 epoch is 67502.95608547877
- The total error at 92 epoch is 67502.92636913632
- The total error at 93 epoch is 67502.89725933937
- The total error at 94 epoch is 67502.8687259568
- The total error at 95 epoch is 67502.84075651446
- The total error at 96 epoch is 67502.8133341945
- The total error at 97 epoch is 67502.78644901175
- The total error at 98 epoch is 67502.76007181007
- The total error at 99 epoch is 67502.73419589132
- '''
Advertisement
Add Comment
Please, Sign In to add comment