Untitled | a guest | Mar 22nd, 2019
import matplotlib.pyplot as plt
import numpy as np


def sigmoid(x):
    return 1.0 / (1 + np.exp(-x))


def sigmoid_derivative(x):
    # expects the sigmoid output itself, i.e. returns s * (1 - s)
    return x * (1.0 - x)


class NeuralNetwork:
    def __init__(self, x, y):
        # one hidden layer with 3 units and a single output unit
        self.input = x
        self.weights1 = np.random.rand(self.input.shape[1], 3)
        self.weights2 = np.random.rand(3, 1)
        self.y = y
        self.output = np.zeros(self.y.shape)

    def feedforward(self):
        self.layer1 = sigmoid(np.dot(self.input, self.weights1))
        self.output = sigmoid(np.dot(self.layer1, self.weights2))

    def backprop(self):
        # application of the chain rule to find the derivative of the
        # sum-of-squares loss with respect to weights2 and weights1
        d_weights2 = np.dot(self.layer1.T,
                            2 * (self.y - self.output) * sigmoid_derivative(self.output))
        d_weights1 = np.dot(self.input.T,
                            np.dot(2 * (self.y - self.output) * sigmoid_derivative(self.output),
                                   self.weights2.T) * sigmoid_derivative(self.layer1))

        # update the weights with the derivative (slope) of the loss function
        self.weights1 += d_weights1
        self.weights2 += d_weights2


if __name__ == "__main__":
    # inputs scaled into [0, 1]; targets normalized by their maximum value (15534.1)
    X = np.array([[1/31, 1/24, 15/60],
                  [1/31, 1/24, 30/60],
                  [1/31, 1/24, 45/60],
                  [1/31, 1/24, 60/60]])
    y = np.array([[15521.7/15534.1],
                  [15534.1/15534.1],
                  [15404.4/15534.1],
                  [15196.3/15534.1]])
    nn = NeuralNetwork(X, y)

    for i in range(10000):
        nn.feedforward()
        nn.backprop()

    print(nn.output)
    print(X)
    # g = np.linspace(0.15, 1, 4)
    # plt.plot(g, y)
    # plt.show()
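
If you want to check that the loop is actually converging, one option (a sketch, not part of the original paste) is to record the sum-of-squares loss on each iteration and plot it with the matplotlib import that is already there:

losses = []
nn = NeuralNetwork(X, y)
for i in range(10000):
    nn.feedforward()
    nn.backprop()
    # same quantity whose gradient backprop() follows
    losses.append(np.sum((y - nn.output) ** 2))

plt.plot(losses)
plt.xlabel("iteration")
plt.ylabel("sum of squared errors")
plt.show()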