import numpy as np


class NeuralNetwork(object):
    def __init__(self, input_nodes, hidden_nodes, output_nodes, learning_rate):
        # Set number of nodes in input, hidden and output layers.
        self.input_nodes = input_nodes
        self.hidden_nodes = hidden_nodes
        self.output_nodes = output_nodes
        self.lr = learning_rate

        # Initialize weights
        self.input_hidden_weights = np.random.randn(hidden_nodes, input_nodes)    # e.g. 10x7
        self.hidden_output_weights = np.random.randn(output_nodes, hidden_nodes)  # e.g. 1x10

        # Sigmoid activation function and its derivative
        # (diff_sigm expects the sigmoid *output*, not the raw pre-activation)
        self.sigmoid = lambda x: 1 / (1 + np.exp(-x))
        self.diff_sigm = lambda x: x * (1 - x)

    def train(self, input_list, label_list):
        # Create column vectors of inputs and labels
        inputs = np.array(input_list, ndmin=2).T  # e.g. 7x1
        labels = np.array(label_list, ndmin=2)    # e.g. 1x1

        # Forward propagation
        hidden_layer = self.sigmoid(np.dot(self.input_hidden_weights, inputs))
        output_layer = self.sigmoid(np.dot(self.hidden_output_weights, hidden_layer))
        final_output = output_layer

        # Error
        output_errors = labels - final_output

        # Backpropagation
        output_delta = output_errors * self.diff_sigm(output_layer)
        hidden_delta = np.dot(self.hidden_output_weights.T, output_delta) * self.diff_sigm(hidden_layer)

        # Update the weights
        self.hidden_output_weights += np.dot(output_delta, hidden_layer.T) * self.lr
        self.input_hidden_weights += np.dot(hidden_delta, inputs.T) * self.lr

        """
        # Earlier backpropagation variant, kept for reference. Note that it
        # skips the output-layer sigmoid derivative, and its hidden-to-output
        # update must use hidden_layer.T (the original used output_layer.T,
        # which has the wrong shape):
        hidden_errors = np.dot(self.hidden_output_weights.T, output_errors)
        hidden_grad = hidden_layer * (1.0 - hidden_layer)
        # Update the weights with a gradient descent step
        self.hidden_output_weights += self.lr * np.dot(output_errors, hidden_layer.T)
        self.input_hidden_weights += self.lr * np.dot(hidden_errors * hidden_grad, inputs.T)
        """