from numpy import exp, array, random, dot


class NeuralNetwork():
    def __init__(self):
        # Seed the random number generator, so it generates the same numbers
        # every time the program runs.
        random.seed(1)

        # We model a single neuron, with 3 input connections and 1 output connection.
        # We assign random weights to a 3 x 1 matrix, with values in the range -1 to 1
        # and mean 0.
        self.synaptic_weights = 2 * random.random((3, 1)) - 1

    # The Sigmoid function, which describes an S-shaped curve.
    # We pass the weighted sum of the inputs through this function to
    # normalise them between 0 and 1.
    def __sigmoid(self, x):
        return 1 / (1 + exp(-x))

    # The derivative of the Sigmoid function.
    # This is the gradient of the Sigmoid curve.
    # It indicates how confident we are about the existing weight.
    # Note: x here is the sigmoid output itself, which is why the
    # derivative simplifies to x * (1 - x).
    def __sigmoid_derivative(self, x):
        return x * (1 - x)

    # We train the neural network through a process of trial and error,
    # adjusting the synaptic weights each time.
    def train(self, training_set_inputs, training_set_outputs, number_of_training_iterations):
        for iteration in range(number_of_training_iterations):
            # Pass the training set through our neural network (a single neuron).
            output = self.think(training_set_inputs)

            # Calculate the error (the difference between the desired output
            # and the predicted output).
            error = training_set_outputs - output

            # Multiply the error by the input and again by the gradient of the Sigmoid curve.
            # This means less confident weights are adjusted more.
            # This means inputs which are zero do not cause changes to the weights.
            adjustment = dot(training_set_inputs.T, error * self.__sigmoid_derivative(output))

            # Adjust the weights.
            self.synaptic_weights += adjustment

    # The neural network thinks.
    def think(self, inputs):
        # Pass inputs through our neural network (our single neuron).
        return self.__sigmoid(dot(inputs, self.synaptic_weights))


if __name__ == "__main__":

    neural_network = NeuralNetwork()

    print('Random starting synaptic weights: ')
    print(neural_network.synaptic_weights)

    # The training set: 4 examples, each with 3 input values and 1 output value.
    training_set_inputs = array([[0, 0, 1], [1, 1, 1], [1, 0, 1], [0, 1, 1]])
    training_set_outputs = array([[0, 1, 1, 0]]).T

    # Train the neural network using the training set.
    # Do it 10,000 times, making small adjustments each time.
    neural_network.train(training_set_inputs, training_set_outputs, 10000)

    print("New synaptic weights after training: ")
    print(neural_network.synaptic_weights)

    # Test the neural network with a new situation.
    print("Considering new situation [1, 0, 0] -> ?: ")
    print(neural_network.think(array([1, 0, 0])))
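
As a rough sanity check (a sketch, not part of the original paste), the snippet below re-trains the network and queries it on one unseen input and one training example. The training set follows the rule "output equals the first input", so the prediction for [1, 0, 0] should land close to 1, and the prediction for the training example [0, 0, 1] close to 0. It assumes the NeuralNetwork class above is defined in the same script; the variable name network is purely illustrative.

from numpy import array

# Assumes the NeuralNetwork class defined above is in scope.
network = NeuralNetwork()
network.train(array([[0, 0, 1], [1, 1, 1], [1, 0, 1], [0, 1, 1]]),
              array([[0, 1, 1, 0]]).T,
              10000)

# The training data encodes "output = first input", so the unseen input
# [1, 0, 0] should score near 1, and the training example [0, 0, 1]
# near 0 (never exactly, since the sigmoid only approaches 0 and 1
# asymptotically).
print(network.think(array([1, 0, 0])))
print(network.think(array([0, 0, 1])))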