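# Logistic regression on two 2-D Gaussian blobs, trained with natural
# gradient descent: the loss gradient is preconditioned by the inverse of
# an empirical Fisher information matrix (see the training loop below).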
import numpy as np
from sklearn.utils import shuffle


# Data: two Gaussian blobs in 2-D, class 0 centered at (-1, -1) and
# class 1 centered at (+1, +1)
X0 = np.random.randn(100, 2) - 1
X1 = np.random.randn(100, 2) + 1
X = np.vstack([X0, X1])
t = np.vstack([np.zeros([100, 1]), np.ones([100, 1])])

X, t = shuffle(X, t)

# 150/50 train/test split
X_train, X_test = X[:150], X[150:]
t_train, t_test = t[:150], t[150:]

# Model: logistic regression without a bias term, y = sigmoid(X @ W)
W = np.random.randn(2, 1) * 0.01


def sigm(x):
    return 1/(1+np.exp(-x))


def NLL(y, t):
    # Mean negative log-likelihood (binary cross-entropy)
    return -np.mean(t*np.log(y) + (1-t)*np.log(1-y))

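# For reference: with z = X @ W and y = sigm(z),
#   dNLL/dy = (y - t) / (m * y * (1 - y)),   dy/dz = y * (1 - y),
# so the chain rule collapses to dNLL/dz = (y - t) / m and
# dNLL/dW = X.T @ (y - t) / m, which is what the training loop computes.

# Optional sanity check (not in the original paste): compare the analytic
# gradient against a central finite-difference estimate at the initial W.
def _check_grad(eps=1e-6):
    y0 = sigm(X_train @ W)
    analytic = X_train.T @ (y0 - t_train) / X_train.shape[0]
    numeric = np.zeros_like(W)
    for i in range(W.shape[0]):
        d = np.zeros_like(W)
        d[i, 0] = eps
        numeric[i, 0] = (NLL(sigm(X_train @ (W + d)), t_train)
                         - NLL(sigm(X_train @ (W - d)), t_train)) / (2 * eps)
    print('max grad error:', np.max(np.abs(analytic - numeric)))

# _check_grad()  # should print a value on the order of 1e-9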

alpha = 0.1  # learning rate

# Training
for it in range(5):
    # Forward
    z = X_train @ W
    y = sigm(z)
    loss = NLL(y, t_train)

    # Loss
    print(f'Loss: {loss:.3f}')

    m = y.shape[0]

    # Backward: dNLL/dy and dy/dz combine to dNLL/dz = (y - t) / m
    dy = (y-t_train)/(m * (y - y*y))
    dz = sigm(z)*(1-sigm(z))
    dW = X_train.T @ (dz * dy)

    # Per-example score: gradient of each example's log-likelihood w.r.t. W
    grad_loglik_z = (t_train-y)/(y - y*y) * dz
    grad_loglik_W = grad_loglik_z * X_train
    F = np.cov(grad_loglik_W.T)

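    # np.cov of the per-example scores gives an empirical Fisher information
    # matrix: F approximates E[grad log p @ grad log p.T]. np.cov subtracts
    # the mean score, which vanishes at the optimum, so the two agree there.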
    # Step: natural gradient, preconditioning dW with the inverse Fisher
    W = W - alpha * np.linalg.inv(F) @ dW
    # W = W - alpha * dW  # plain gradient descent, for comparison

# print(W)

# Evaluate on the held-out test set
y = sigm(X_test @ W).ravel()
acc = np.mean((y >= 0.5) == t_test.ravel())

print(f'Accuracy: {acc:.3f}')
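
# Optional cross-check (not in the original paste): scikit-learn's logistic
# regression on the same split should reach a similar accuracy; sklearn is
# already a dependency here via `shuffle`. fit_intercept=False matches the
# bias-free model above.
from sklearn.linear_model import LogisticRegression
clf = LogisticRegression(fit_intercept=False).fit(X_train, t_train.ravel())
print(f'sklearn accuracy: {clf.score(X_test, t_test.ravel()):.3f}')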