from ast import literal_eval

from numpy import exp, array, random, dot


class NeuralNetwork():
    def __init__(self):
        # Seed the random number generator, so it generates the same numbers
        # every time the program runs.
        random.seed(1)

        # We model a single neuron, with 3 input connections and 1 output connection.
        # We assign random weights to a 3 x 1 matrix, with values in the range -1 to 1
        # and mean 0.
        self.synaptic_weights = 2 * random.random((3, 1)) - 1

    # The Sigmoid function, which describes an S shaped curve.
    # We pass the weighted sum of the inputs through this function to
    # normalize them between 0 and 1.
    def __sigmoid(self, x):
        return 1 / (1 + exp(-x))

    # The derivative of the Sigmoid function.
    # This is the gradient of the Sigmoid curve.
    # It indicates how confident we are about the existing weight.
    # Note that x here is the *output* of the Sigmoid, not its raw input:
    # if y = sigmoid(x), then dy/dx = y * (1 - y).
    def __sigmoid_derivative(self, x):
        return x * (1 - x)

    # We train the neural network through a process of trial and error,
    # adjusting the synaptic weights each time.
    def train(self, training_set_inputs, training_set_outputs, number_of_training_iterations):
        for iteration in range(number_of_training_iterations):
            # Pass the training set through our neural network (a single neuron).
            output = self.think(training_set_inputs)

            # Calculate the error (the difference between the desired output
            # and the predicted output).
            error = training_set_outputs - output

            # Multiply the error by the input and again by the gradient of the Sigmoid curve.
            # This means less confident weights are adjusted more.
            # This means inputs, which are zero, do not cause changes to the weights.
            adjustment = dot(training_set_inputs.T, error * self.__sigmoid_derivative(output))

            # Adjust the weights.
            self.synaptic_weights += adjustment

    # The neural network thinks.
    def think(self, inputs):
        # Pass inputs through our neural network (our single neuron).
        return self.__sigmoid(dot(inputs, self.synaptic_weights))


if __name__ == "__main__":

    # Initialize a single neuron neural network.
    neural_network = NeuralNetwork()

    print("Random starting synaptic weights: ")
    print(neural_network.synaptic_weights)

    # The training set. We have 4 examples, each consisting of 3 input values
    # and 1 output value.
    training_set_inputs = array([[0, 0, 1], [1, 1, 1], [1, 0, 1], [0, 1, 1]])
    training_set_outputs = array([[0, 1, 1, 0]]).T

    # Train the neural network using a training set.
    # Do it 10,000 times and make small adjustments each time.
    neural_network.train(training_set_inputs, training_set_outputs, 10000)

    print("New synaptic weights after training: ")
    print(neural_network.synaptic_weights)

    # Test the neural network with a new situation.
    print("Considering new situation [1, 0, 0] -> ?: ")
    print(neural_network.think(array([1, 0, 0])))

    # Have the user input a set of integers to test. In Python 3, input()
    # returns a string, so parse the literal safely with ast.literal_eval
    # instead of evaluating arbitrary code.
    new_integers = literal_eval(input("Input a set of integers using the form [x, y, z]: "))
    print(neural_network.think(array(new_integers)))
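
A subtle point in the listing above is that __sigmoid_derivative expects the sigmoid's output rather than its raw input, which is why train() can reuse the already-computed output directly. The standalone snippet below is a minimal sanity check of that convention (not part of the original paste; the helper names are illustrative and only numpy is assumed), comparing the analytic derivative against a central finite difference:

from numpy import exp, allclose, linspace

def sigmoid(x):
    return 1 / (1 + exp(-x))

def sigmoid_derivative_from_output(y):
    # Same convention as NeuralNetwork.__sigmoid_derivative: y = sigmoid(x).
    return y * (1 - y)

x = linspace(-5, 5, 101)
h = 1e-6

# Central finite difference approximates d(sigmoid)/dx at each point.
numeric = (sigmoid(x + h) - sigmoid(x - h)) / (2 * h)

# Analytic derivative, computed from the sigmoid's output as in the class above.
analytic = sigmoid_derivative_from_output(sigmoid(x))

print(allclose(numeric, analytic))  # expected: True

If the check prints True, the y * (1 - y) form matches the true gradient, confirming it is safe to feed the neuron's output straight into the weight-adjustment step.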