Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- import numpy as np
- import time
- import sys
class NeuralNetwork:
    """A simple fully-connected feed-forward network with sigmoid
    activations, trained by per-sample stochastic gradient descent.

    Weights are stored as ``np.matrix`` objects, one per layer
    transition, with shape ``(layers[i], layers[i - 1])``; inputs are
    expected as column matrices.
    """

    def __init__(self, layers, eps=0.3, eta=0.3, learning_step_count=500):
        """Initialise uniform random weights in ``[-eps, eps)``.

        layers -- list of layer sizes, e.g. ``[784, 2000, 10]``
        eps    -- half-width of the weight-initialisation range
        eta    -- learning rate used by train() (previously hard-coded 0.3)
        learning_step_count -- epochs train() performs (previously 500)
        """
        self.w = []
        self.eta = eta
        self.learningStepCount = learning_step_count
        for i in range(1, len(layers)):
            # Uniform in [-eps, eps), shape (fan_out, fan_in).
            self.w.append(np.matrix(
                eps * (2 * np.random.random((layers[i], layers[i - 1])) - 1)))

    def sigmoid(self, x):
        """Element-wise logistic function 1 / (1 + e^-x)."""
        return 1.0 / (1 + np.exp(-x))

    def getOutput(self, x):
        """Forward-propagate column input ``x`` and return a 1-D one-hot
        ndarray with a 1 at the argmax of the final activations."""
        for layer in self.w:
            x = self.sigmoid(layer * x)
        ind = x.argmax()
        x = np.zeros(len(x))
        x[ind] = 1
        return x

    def gradient(self, x):
        """Sigmoid derivative expressed via its output: s * (1 - s)."""
        return np.multiply(x, 1 - x)

    def train(self, testX, testY):
        """Train with plain SGD: for each (input, target) pair run a
        forward pass, back-propagate the error, and update every layer.
        Prints the epoch index after each epoch."""
        for step in range(self.learningStepCount):
            for xInput, y in zip(testX, testY):
                x = []  # per-layer input activations; x[i] feeds self.w[i]
                for layer in self.w:
                    x.append(xInput)
                    xInput = self.sigmoid(layer * xInput)
                output = xInput
                delta = np.multiply(self.gradient(output), (y - output))
                for i in reversed(range(len(self.w))):
                    # BUG FIX: propagate the error through the *old*
                    # weights before applying this layer's update.  The
                    # original updated w[i] first, so the previous layer
                    # received a gradient computed with already-updated
                    # weights.
                    prevDelta = np.multiply(self.gradient(x[i]),
                                            self.w[i].T * delta)
                    self.w[i] += self.eta * delta * x[i].T
                    delta = prevDelta
            print(step)
- #
# BUG FIX: dropped the bare '@profile' decorator — the name is only
# injected by line_profiler's kernprof runner, so the script raised
# NameError on a normal run (the class's copy was already commented out).
def main():
    """Load 'train.csv' (label followed by 784 pixel values per row),
    train a 784-2000-10 network on half of a 10% subsample, and report
    elapsed time and classification accuracy on the other half."""
    timer = time.time()  # BUG FIX: 'timer' was read below but never set
    x, y = [], []
    tests = []
    # Context manager so the file is closed even if a row fails to parse.
    with open('train.csv', 'r') as f:
        for line in f:
            tests.append(np.array(list(map(int, line.split(',')))))
    np.random.shuffle(tests)
    for test in tests:
        x.append(np.matrix(test[1:]).T)  # pixel values as a column vector
        tmp = [0] * 10
        # NOTE(review): '- 1' assumes 1-based class labels; with 0-based
        # MNIST labels it wraps class 0 onto index 9 — confirm the data.
        tmp[int(test[0]) - 1] = 1
        y.append(np.matrix(tmp).T)  # one-hot column target
    netw = NeuralNetwork([784, 2000, 10])
    tests = tests[:len(tests) // 10]
    testCount = len(tests)
    print(testCount)
    tests.clear()  # free the raw rows; x/y hold everything we need
    netw.train(x[:testCount // 2], y[:testCount // 2])
    good = 0
    evalCount = testCount - testCount // 2  # size of the held-out half
    for i in range(testCount // 2, testCount):
        # BUG FIX: getOutput returns a 1-D array while y[i] is a (10, 1)
        # matrix; comparing them broadcast to a (10, 10) grid, making the
        # accuracy count meaningless.  Flatten the target first.
        if (netw.getOutput(x[i]) == np.asarray(y[i]).ravel()).all():
            good += 1
    print('Spent', time.time() - timer, 'seconds')
    # BUG FIX: divide by the actual evaluation-set size instead of
    # 'good / testCount * 2', which assumed testCount is even.
    print('Accuracy:', good / evalCount)


if __name__ == '__main__':
    main()
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement