TRADECTO

LAB 8.3

Dec 18th, 2018
import numpy as np
import matplotlib.pyplot as plt

# Activation function
def sigmoid(t):
    return 1 / (1 + np.exp(-t))

# Derivative of the sigmoid, written in terms of its output p = sigmoid(t)
def sigmoid_derivative(p):
    return p * (1 - p)

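# Quick numerical check (a minimal sketch; the underscore names below are illustrative only):
# sigmoid_derivative expects the *output* of sigmoid, i.e. p = sigmoid(t), so
# sigmoid_derivative(sigmoid(t)) should match a finite-difference estimate of d/dt sigmoid(t).
_t = 0.5
_h = 1e-6
_numeric = (sigmoid(_t + _h) - sigmoid(_t - _h)) / (2 * _h)
print("sigmoid' analytic:", sigmoid_derivative(sigmoid(_t)), " numeric:", _numeric)
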
# Class definition: one hidden layer of n sigmoid units feeding a single sigmoid output
class NeuralNetwork:
    def __init__(self, x, y, n):
        self.input = x
        self.weights1 = np.random.rand(self.input.shape[1], n)  # n nodes in the hidden layer
        self.weights2 = np.random.rand(n, 1)                    # hidden layer to single output
        self.y = y
        self.output = np.zeros(y.shape)

    def feedforward(self):
        self.layer1 = sigmoid(np.dot(self.input, self.weights1))
        self.layer2 = sigmoid(np.dot(self.layer1, self.weights2))
        return self.layer2

    def backprop(self):
        # Chain rule through each sigmoid layer; these are the negative gradients of the
        # summed squared-error loss, so they are added to the weights below
        d_weights2 = np.dot(self.layer1.T,
                            2 * (self.y - self.output) * sigmoid_derivative(self.output))
        d_weights1 = np.dot(self.input.T,
                            np.dot(2 * (self.y - self.output) * sigmoid_derivative(self.output),
                                   self.weights2.T) * sigmoid_derivative(self.layer1))
        self.weights1 += d_weights1  # implicit learning rate of 1
        self.weights2 += d_weights2

    def train(self, X, y):
        # X and y are already stored on the instance; one call is one forward and backward pass
        self.output = self.feedforward()
        self.backprop()

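# Optional gradient check (a minimal sketch, assuming the class above; the underscore
# names are illustrative only): the d_weights2 expression used in backprop() is the
# negative gradient of sum((y - output)^2), so it should match the negated
# finite-difference estimate of that loss with respect to one entry of weights2.
_Xc = np.random.random((4, 3))
_yc = _Xc.mean(axis=1, keepdims=True)
_net = NeuralNetwork(_Xc, _yc, 2)
_net.output = _net.feedforward()
_d_w2 = np.dot(_net.layer1.T, 2 * (_yc - _net.output) * sigmoid_derivative(_net.output))

_eps = 1e-6
_net.weights2[0, 0] += _eps
_loss_plus = np.sum(np.square(_yc - _net.feedforward()))
_net.weights2[0, 0] -= 2 * _eps
_loss_minus = np.sum(np.square(_yc - _net.feedforward()))
_net.weights2[0, 0] += _eps  # restore the perturbed weight
print("d_weights2[0,0] analytic:", _d_w2[0, 0],
      " numeric:", -(_loss_plus - _loss_minus) / (2 * _eps))
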
# Each row of X is a training example; the columns are the features X1, X2, X3.
# The target y is the mean of the three features.
X = np.random.random((50, 3))
y_list = np.array([(X[n][0] + X[n][1] + X[n][2]) / 3 for n in range(50)], dtype=float)
y = y_list.reshape(-1, 1)  # column vector of targets, shape (50, 1)
loss = []
iterr = list(range(0, 1001))

print("Input : \n" + str(X))
print("Actual Output: \n" + str(y))

NN = NeuralNetwork(X, y, 5)  # 5 nodes in the hidden layer
print("\n*** Neural network training started...")
for i in range(1001):  # trains the NN 1,001 times
    loss.append(np.mean(np.square(y - NN.feedforward())))  # mean squared loss
    if i % 100 == 0:
        print("\n*** iteration # " + str(i))
        print("Predicted Output: \n" + str(NN.feedforward()))
        print("Loss function is: \n" + str(np.mean(np.square(y - NN.feedforward()))))
    NN.train(X, y)

# Print the full loss history, then plot it against the iteration number
for l in loss:
    print(l)

plt.plot(iterr, loss)
plt.savefig('pic_0_1.png')
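
# Optional evaluation (a minimal sketch, assuming NN is the trained instance above;
# "X_test" and "y_test" are illustrative names): score the network on fresh random
# inputs, whose true target is again the mean of the three features.
X_test = np.random.random((10, 3))
y_test = X_test.mean(axis=1, keepdims=True)
NN.input = X_test            # feedforward() reads whatever inputs are stored on the object
pred_test = NN.feedforward()
NN.input = X                 # restore the training inputs
print("Test MSE: " + str(np.mean(np.square(y_test - pred_test))))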