import numpy as np

# Sigmoid activation; when deriv=True, x is assumed to already be the
# sigmoid output, so x*(1-x) gives its derivative.
def nonlin(x, deriv=False):
    if deriv:
        return x * (1 - x)
    return 1 / (1 + np.exp(-x))

# input data: 4 samples, 3 features each (third column acts as a bias)
X = np.array([[0, 0, 1],
              [0, 1, 1],
              [1, 0, 1],
              [1, 1, 1]])

# output data (XOR of the first two input columns)
y = np.array([[0],
              [1],
              [1],
              [0]])

np.random.seed(1)

# synapses (weight matrices), initialized randomly in [-1, 1)
syn0 = 2 * np.random.random((3, 4)) - 1
syn1 = 2 * np.random.random((4, 1)) - 1

# training step
for j in range(60000):
    # forward pass through both layers
    l0 = X
    l1 = nonlin(np.dot(l0, syn0))
    l2 = nonlin(np.dot(l1, syn1))

    # error at the output layer
    l2_error = y - l2

    if (j % 10000) == 0:
        print("Error:" + str(np.mean(np.abs(l2_error))))

    # backpropagate: scale each layer's error by the sigmoid derivative
    l2_delta = l2_error * nonlin(l2, deriv=True)
    l1_error = l2_delta.dot(syn1.T)
    l1_delta = l1_error * nonlin(l1, deriv=True)

    # update weights
    syn1 += l1.T.dot(l2_delta)
    syn0 += l0.T.dot(l1_delta)

print("Output after training")
print(l2)
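
# --- Optional inference sketch (not part of the original paste) ---
# Assuming the trained weights syn0 and syn1 from above, this forward pass
# predicts the output for a hypothetical unseen 3-element input vector.
new_input = np.array([[1, 0, 0]])          # hypothetical example, chosen for illustration
hidden = nonlin(np.dot(new_input, syn0))   # hidden-layer activation
prediction = nonlin(np.dot(hidden, syn1))  # network output
print("Prediction for", new_input, ":", prediction)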