import random
import numpy

def Train(self, trainingData, epoch, batchSize, learningRate, testingData=None):
    # Mini-batch stochastic gradient descent over `epoch` passes of the data.
    trainDataSize = len(trainingData)
    testDataSize = len(testingData) if testingData else 0  # guard: testingData may be None
    for i in range(epoch):
        random.shuffle(trainingData)

        for j in range(0, trainDataSize, batchSize):
            # Fresh gradient accumulators for each mini-batch, one array
            # per layer, shaped like the biases and weights themselves.
            nabla_b = [numpy.zeros(b.shape) for b in self.biases]
            nabla_w = [numpy.zeros(w.shape) for w in self.weights]
            miniBatchSize = 0
            for x, y in trainingData[j:j + batchSize]:
                delta_nabla_b, delta_nabla_w = self.Backpropagation(x, y)

                # Accumulate this example's gradients, layer by layer.
                nabla_b = [dnb + nb for dnb, nb in zip(delta_nabla_b, nabla_b)]
                nabla_w = [dnw + nw for dnw, nw in zip(delta_nabla_w, nabla_w)]

                miniBatchSize += 1

            # Gradient-descent step, averaged over the actual batch size
            # (the final slice may be shorter than batchSize).
            self.weights = [w - (learningRate / miniBatchSize) * nw
                            for w, nw in zip(self.weights, nabla_w)]
            self.biases = [b - (learningRate / miniBatchSize) * nb
                           for b, nb in zip(self.biases, nabla_b)]

        if testingData:
            print("Epoch {0}: {1} / {2}".format(i, self.evaluate(testingData), testDataSize))
        else:
            print("Epoch {0} complete".format(i))
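
For context, here is a minimal sketch of the class this method appears to belong to. The paste defines only Train, so everything below is an assumption reconstructed from the attributes Train uses (self.weights, self.biases, self.Backpropagation, self.evaluate): a sigmoid feed-forward network with a quadratic cost, in the style of Nielsen-style network code. The class name Network, the layer sizes, and the random data in the usage example are all illustrative, not part of the original. It continues the module above (same imports).

# Hypothetical reconstruction of the surrounding class; only Train above
# comes from the original paste.

def sigmoid(z):
    return 1.0 / (1.0 + numpy.exp(-z))

def sigmoid_prime(z):
    s = sigmoid(z)
    return s * (1.0 - s)

class Network:
    def __init__(self, sizes):
        # One bias vector per non-input layer, one weight matrix per layer pair.
        self.sizes = sizes
        self.biases = [numpy.random.randn(n, 1) for n in sizes[1:]]
        self.weights = [numpy.random.randn(n, m)
                        for m, n in zip(sizes[:-1], sizes[1:])]

    def FeedForward(self, a):
        for b, w in zip(self.biases, self.weights):
            a = sigmoid(numpy.dot(w, a) + b)
        return a

    def Backpropagation(self, x, y):
        # Per-example gradients (nabla_b, nabla_w) for a quadratic cost,
        # shaped exactly like self.biases / self.weights, as Train expects.
        nabla_b = [numpy.zeros(b.shape) for b in self.biases]
        nabla_w = [numpy.zeros(w.shape) for w in self.weights]
        activation, activations, zs = x, [x], []
        for b, w in zip(self.biases, self.weights):
            z = numpy.dot(w, activation) + b
            zs.append(z)
            activation = sigmoid(z)
            activations.append(activation)
        delta = (activations[-1] - y) * sigmoid_prime(zs[-1])
        nabla_b[-1] = delta
        nabla_w[-1] = numpy.dot(delta, activations[-2].transpose())
        for l in range(2, len(self.sizes)):
            delta = numpy.dot(self.weights[-l + 1].transpose(), delta) * sigmoid_prime(zs[-l])
            nabla_b[-l] = delta
            nabla_w[-l] = numpy.dot(delta, activations[-l - 1].transpose())
        return nabla_b, nabla_w

    def evaluate(self, testingData):
        # Counts test examples whose strongest output unit matches the
        # integer label (labels assumed to be ints here, one-hot in training).
        return sum(int(numpy.argmax(self.FeedForward(x)) == y)
                   for x, y in testingData)

Network.Train = Train  # attach the pasted function as a method

# Illustrative usage with random data (shapes and hyperparameters are made up):
net = Network([4, 8, 3])
trainingData = [(numpy.random.rand(4, 1), numpy.eye(3)[:, [k % 3]])
                for k in range(300)]
testingData = [(numpy.random.rand(4, 1), k % 3) for k in range(60)]
net.Train(trainingData, epoch=3, batchSize=10, learningRate=3.0,
          testingData=testingData)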