Basic neural network example
muntoo · Jan 7th, 2018
def train(self, training_set_inputs, training_set_outputs):
    # Run up to 100 global steps, stopping early once the output
    # layer's mean absolute error drops below 0.1.
    for _ in range(100):
        self.train_global_step(training_set_inputs, training_set_outputs)
        if self.layer2_error < 0.1:
            break

def train_global_step(self, training_set_inputs, training_set_outputs):
    self.layer2_error = 0
    self.learning_rate = 0.1

    # Each pass below is a full-batch update; one global step repeats
    # it once per training example.
    for _ in range(len(training_set_inputs)):
        # Pass the training set through our neural network.
        output_from_layer_1, output_from_layer_2 = self.think(training_set_inputs)

        # Calculate the error for layer 2 (the difference between the
        # desired output and the predicted output).
        layer2_error = training_set_outputs - output_from_layer_2
        layer2_delta = layer2_error * self.__sigmoid_derivative(output_from_layer_2)

        # Track the mean absolute error so train() can stop early
        # (np is NumPy, imported at module level; see the sketch below).
        self.layer2_error = np.mean(np.abs(layer2_error))

        # Calculate the error for layer 1 (by looking at the weights in
        # layer 2, we can determine how much layer 1 contributed to the
        # error in layer 2).
        layer1_error = layer2_delta.dot(self.layer2.synaptic_weights.T)
        layer1_delta = layer1_error * self.__sigmoid_derivative(output_from_layer_1)

        # Calculate how much to adjust the weights by.
        layer1_adjustment = training_set_inputs.T.dot(layer1_delta)
        layer2_adjustment = output_from_layer_1.T.dot(layer2_delta)

        # Adjust the weights.
        self.layer1.synaptic_weights += self.learning_rate * layer1_adjustment
        self.layer2.synaptic_weights += self.learning_rate * layer2_adjustment
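The two methods above rely on self.think, self.__sigmoid_derivative, and the layer1/layer2 objects, none of which are included in the paste. Below is a minimal sketch of the surrounding class, assuming the standard two-layer sigmoid network this code appears to be adapted from; NeuronLayer, NeuralNetwork, think, and __sigmoid are reconstructions rather than part of the original paste.

import numpy as np

class NeuronLayer():
    def __init__(self, number_of_neurons, number_of_inputs_per_neuron):
        # Random weights drawn from [-1, 1).
        self.synaptic_weights = 2 * np.random.random(
            (number_of_inputs_per_neuron, number_of_neurons)) - 1

class NeuralNetwork():
    def __init__(self, layer1, layer2):
        self.layer1 = layer1
        self.layer2 = layer2

    def __sigmoid(self, x):
        # Logistic activation: squashes any value into (0, 1).
        return 1 / (1 + np.exp(-x))

    def __sigmoid_derivative(self, x):
        # Gradient of the sigmoid, written in terms of its output.
        return x * (1 - x)

    def think(self, inputs):
        # Forward pass: inputs -> hidden layer -> output layer.
        output_from_layer1 = self.__sigmoid(inputs.dot(self.layer1.synaptic_weights))
        output_from_layer2 = self.__sigmoid(output_from_layer1.dot(self.layer2.synaptic_weights))
        return output_from_layer1, output_from_layer2

    # train() and train_global_step() from the paste belong here,
    # indented as methods of NeuralNetwork.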
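With that scaffolding in place, the network can be trained end to end. The toy dataset below is purely illustrative (the target output is 1 exactly when the first input is 1):

np.random.seed(1)

layer1 = NeuronLayer(4, 3)   # hidden layer: 4 neurons, 3 inputs each
layer2 = NeuronLayer(1, 4)   # output layer: 1 neuron, 4 inputs
network = NeuralNetwork(layer1, layer2)

training_set_inputs = np.array([[0, 0, 1], [0, 1, 1], [1, 0, 1],
                                [0, 1, 0], [1, 0, 0], [1, 1, 1], [0, 0, 0]])
training_set_outputs = np.array([[0, 0, 1, 0, 1, 1, 0]]).T

network.train(training_set_inputs, training_set_outputs)

# Try an unseen situation: [1, 1, 0] should map to something close to 1.
_, output = network.think(np.array([[1, 1, 0]]))
print(output)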