Advertisement
Not a member of Pastebin yet?
Sign Up — it unlocks many cool features!
- import numpy as np
- import matplotlib.pyplot as plt
# Activation function
def sigmoid(t):
    """Logistic sigmoid 1 / (1 + e^-t): squashes any real input into (0, 1)."""
    exp_neg = np.exp(-t)
    return 1 / (1 + exp_neg)
# Derivative of sigmoid function
def sigmoid_derivative(p):
    """Sigmoid derivative expressed in terms of the sigmoid's OUTPUT p: p * (1 - p)."""
    complement = 1 - p
    return p * complement
# Class definition
class NeuralNetwork:
    """A 2-layer (one hidden layer) feedforward network trained by full-batch
    gradient descent on mean-squared error, using the module-level ``sigmoid``
    activation everywhere.

    Attributes set here:
        input    -- training inputs, shape (samples, features)
        weights1 -- input->hidden weights, shape (features, n), uniform [0, 1)
        weights2 -- hidden->output weights, shape (n, 1), uniform [0, 1)
        y        -- training targets, shape (samples, 1)
        output   -- last forward-pass prediction (zeros until first train/feedforward)
    """

    def __init__(self, x, y, n):
        self.input = x
        self.weights1 = np.random.rand(self.input.shape[1], n)  # n nodes in the hidden layer
        self.weights2 = np.random.rand(n, 1)
        self.y = y
        self.output = np.zeros(y.shape)

    def feedforward(self):
        """Run a forward pass over ``self.input`` and return the predictions.

        Side effects: caches hidden activations in ``self.layer1`` and the
        final output in ``self.layer2`` for use by ``backprop``.
        """
        self.layer1 = sigmoid(np.dot(self.input, self.weights1))
        self.layer2 = sigmoid(np.dot(self.layer1, self.weights2))
        return self.layer2

    def backprop(self):
        """One gradient step on both weight matrices.

        Uses the cached activations from the last ``feedforward`` call and the
        chain rule; d(MSE)/d(output) = 2 * (y - output) here carries the sign
        such that ``+=`` moves the weights downhill on the loss.
        Note: fixed learning rate of 1 (the raw gradient is applied directly).
        """
        # Shared error term: dLoss/dPreactivation at the output layer.
        error = 2 * (self.y - self.output) * sigmoid_derivative(self.output)
        d_weights2 = np.dot(self.layer1.T, error)
        d_weights1 = np.dot(self.input.T,
                            np.dot(error, self.weights2.T) * sigmoid_derivative(self.layer1))
        self.weights1 += d_weights1
        self.weights2 += d_weights2

    def train(self, X, y):
        """One training iteration (forward pass + backprop) on (X, y).

        Bug fix: the original ignored both arguments and always trained on the
        data captured in ``__init__``. Now the passed-in batch is bound first,
        so the signature finally means what it says (backward-compatible for
        callers that pass the construction-time X and y).
        """
        self.input = X
        self.y = y
        self.output = self.feedforward()
        self.backprop()
# Each row is a training example, each column is [X1, X2, X3]
X = np.random.random((50, 3))
# Target for each example is the mean of its three features, as a column vector.
y = np.array([(row[0] + row[1] + row[2]) / 3 for row in X], dtype=float).reshape(-1, 1)

loss = []
iterr = list(range(0, 1001))

print("Input : \n" + str(X))
print("Actual Output: \n" + str(y))

NN = NeuralNetwork(X, y, 5)  # number of nodes in the hidden layer
print("\n*** Neural network training started...")

for i in range(1001):  # trains the NN 1,001 times
    # Record the mean sum squared loss before this iteration's update.
    loss.append(np.mean(np.square(y - NN.feedforward())))
    if i % 100 == 0:
        print("\n*** iteration # " + str(i))
        print("Predicted Output: \n" + str(NN.feedforward()))
        print("Loss function is: \n" + str(np.mean(np.square(y - NN.feedforward()))))  # mean sum squared loss
    NN.train(X, y)

# Dump the full loss history, one value per line.
for value in loss:
    print(value)

plt.plot(iterr, loss)
plt.savefig('pic_0_1.png')
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement