Untitled

a guest, Jan 7th, 2018
def train(self, training_set_inputs, training_set_outputs):
    # Run up to 100 full-batch gradient steps, stopping early once the
    # output layer's mean absolute error drops below 0.1.
    for i in range(100):
        self.train_global_step(training_set_inputs, training_set_outputs)
        if self.layer2_error < 0.1:
            break


def train_global_step(self, training_set_inputs, training_set_outputs):
    # Perform one full-batch gradient-descent step.
    self.learning_rate = 0.1

    # Pass the training set through our neural network.
    output_from_layer_1, output_from_layer_2 = self.think(training_set_inputs)

    # Calculate the error for layer 2 (the difference between the desired
    # output and the predicted output).
    layer2_error = training_set_outputs - output_from_layer_2
    layer2_delta = layer2_error * self.__sigmoid_derivative(output_from_layer_2)

    # Track a scalar error so train() can test it against its threshold.
    self.layer2_error = abs(layer2_error).mean()

    # Calculate the error for layer 1 (by looking at the weights in layer 2,
    # we can determine by how much layer 1 contributed to the error in layer 2).
    layer1_error = layer2_delta.dot(self.layer2.synaptic_weights.T)
    layer1_delta = layer1_error * self.__sigmoid_derivative(output_from_layer_1)

    # Calculate how much to adjust the weights by.
    layer1_adjustment = training_set_inputs.T.dot(layer1_delta)
    layer2_adjustment = output_from_layer_1.T.dot(layer2_delta)

    # Adjust the weights.
    self.layer1.synaptic_weights += self.learning_rate * layer1_adjustment
    self.layer2.synaptic_weights += self.learning_rate * layer2_adjustment
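
The methods above reference a surrounding class (self.think, self.layer1, self.layer2.synaptic_weights, self.__sigmoid_derivative) that the paste omits. Below is a minimal sketch of what that class could look like, assuming a two-layer fully connected network with sigmoid activations and NumPy arrays; the NeuronLayer/NeuralNetwork names, the weight initialisation, and the layer sizes and dataset in the usage example are illustrative assumptions, not part of the original paste.

import numpy as np


class NeuronLayer:
    def __init__(self, number_of_neurons, number_of_inputs_per_neuron):
        # Random weights in [-1, 1), one column per neuron (assumed initialisation).
        self.synaptic_weights = 2 * np.random.random(
            (number_of_inputs_per_neuron, number_of_neurons)) - 1


class NeuralNetwork:
    def __init__(self, layer1, layer2):
        self.layer1 = layer1
        self.layer2 = layer2

    def __sigmoid(self, x):
        return 1 / (1 + np.exp(-x))

    def __sigmoid_derivative(self, x):
        # x is already a sigmoid activation, so the derivative is x * (1 - x).
        return x * (1 - x)

    def think(self, inputs):
        # Forward pass: inputs -> layer 1 -> layer 2, sigmoid at each layer.
        output_from_layer1 = self.__sigmoid(inputs.dot(self.layer1.synaptic_weights))
        output_from_layer2 = self.__sigmoid(output_from_layer1.dot(self.layer2.synaptic_weights))
        return output_from_layer1, output_from_layer2

    # train() and train_global_step() from the paste go here, unchanged.

A hypothetical usage, with a small illustrative dataset (3 inputs, 4 hidden neurons, 1 output):

np.random.seed(1)
network = NeuralNetwork(NeuronLayer(4, 3), NeuronLayer(1, 4))
training_set_inputs = np.array([[0, 0, 1], [0, 1, 1], [1, 0, 1],
                                [0, 1, 0], [1, 0, 0], [1, 1, 1], [0, 0, 0]])
training_set_outputs = np.array([[0, 1, 1, 1, 1, 0, 0]]).T
network.train(training_set_inputs, training_set_outputs)
print(network.think(np.array([[1, 1, 0]]))[1])

Note that train() always terminates because of its 100-step cap, but 100 steps at a 0.1 learning rate is usually too few for the error to fall below the 0.1 threshold on non-trivial data, so expect a partially trained network unless the cap is raised.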