Untitled

a guest
Jun 19th, 2019
import matplotlib.pyplot as plt
import numpy as np
from numpy import random


class Neuron:
    def __init__(self, weight_count):
        # weight_count is accepted but unused: the model has a single weight
        random.seed(1)
        self.weights = 2 * random.random((1, 1)) - 1

    def tanh_derivative(self, output):
        # derivative of tanh expressed in terms of the tanh output itself
        return 1 - output ** 2

    def step(self, x):
        # forward pass: weighted sum squashed through tanh
        dot_product = np.dot(x, self.weights)
        return np.tanh(dot_product)

    def train(self, iterations, train_inputs, train_outputs):
        for i in range(iterations):
            output = self.step(train_inputs)
            error = train_outputs - output
            # gradient-style update: error scaled by the tanh derivative
            adjustment = np.dot(train_inputs.T, error * self.tanh_derivative(output))
            self.weights += adjustment

    @staticmethod
    def function(x):
        # the target function the neuron should learn: y = 2x
        return 2 * x


x = [i / 100 for i in range(300)]
y = [Neuron.function(i / 100) for i in range(300)]

# noisy samples of the target line, for plotting
data = []
for i in range(300):
    data.append(Neuron.function(i / 100) + random.randint(1, 100) / 50)

plt.plot(data, "b.")
plt.show()

# scale inputs and outputs down so they stay in tanh's near-linear region
x = np.asarray([x]) / 100
y = np.asarray([y]) / 100

neuron = Neuron(300)
x = x.reshape(300, 1)
y = y.T
neuron.train(10000, x, y)

constant = neuron.weights[0][0]
print(constant)

# rescale the fitted line back to the range of the noisy data
test_data = []
for i in x:
    test_data.append(i * 100 * constant)

plt.plot(data, "bo")
plt.plot(test_data, "r-")
plt.show()
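
Not part of the original paste, just a quick sanity check one might append: the inputs are scaled into tanh's near-linear region and the target is y = 2x, so the learned weight should end up close to 2.

    # hypothetical check, run after the script above has finished training
    print("learned weight:", constant)
    print("close to the true slope of 2?", abs(constant - 2.0) < 0.1)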