import random
import numpy

# Method of a network class; assumes self.weights, self.biases,
# self.Backpropagation and self.evaluate are defined elsewhere.
def Train(self, trainingData, epoch, batchSize, learningRate, testingData):
    # Mini-batch stochastic gradient descent.
    testDataSize = len(testingData) if testingData else 0
    trainDataSize = len(trainingData)
    for i in range(epoch):
        random.shuffle(trainingData)
        for j in range(0, trainDataSize, batchSize):
            # Fresh gradient accumulators for each mini-batch.
            nabla_b = [numpy.zeros(b.shape) for b in self.biases]
            nabla_w = [numpy.zeros(w.shape) for w in self.weights]
            miniBatchSize = 0
            for x, y in trainingData[j:j+batchSize]:
                delta_nabla_b, delta_nabla_w = self.Backpropagation(x, y)
                nabla_b = [nb + dnb for dnb, nb in zip(delta_nabla_b, nabla_b)]
                nabla_w = [nw + dnw for dnw, nw in zip(delta_nabla_w, nabla_w)]
                miniBatchSize += 1
            # Gradient step, averaged over the actual mini-batch size
            # (the last batch may be smaller than batchSize).
            self.weights = [w - (learningRate / miniBatchSize) * nw
                            for w, nw in zip(self.weights, nabla_w)]
            self.biases = [b - (learningRate / miniBatchSize) * nb
                           for b, nb in zip(self.biases, nabla_b)]
        if testingData:
            print("Epoch {0}: {1} / {2}".format(i, self.evaluate(testingData), testDataSize))
        else:
            print("Epoch {0} complete".format(i))
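For reference, a minimal usage sketch. The Network class, its layer-sizes constructor, and the random toy data below are assumptions for illustration only; nothing except Train itself comes from the code above.

# Hypothetical usage; `Network` and its constructor signature are assumed.
import numpy

net = Network([784, 30, 10])

# (input column vector, target) pairs in the shape Backpropagation expects;
# random placeholders stand in for a real dataset such as MNIST.
trainingData = [(numpy.random.rand(784, 1), numpy.random.rand(10, 1))
                for _ in range(1000)]
testingData = [(numpy.random.rand(784, 1), numpy.random.randint(10))
               for _ in range(100)]

net.Train(trainingData, epoch=30, batchSize=10, learningRate=3.0,
          testingData=testingData)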