  1. """
  2. Check that the gradient of the logistic regression is correct
  3. """
  4.  
  5. import numpy as np
  6.  
  7. BIG = 1e12
  8.  
  9. def phi(t):
  10. """
  11. logistic function, returns 1 / (1 + exp(-t))
  12. """
  13. return 1. / (1 + np.exp(-t))
  14.  
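

# Model implied by the code below (ordinal logistic regression): the classes
# are ordered 0..K-1 with cumulative probabilities
#   P(y <= j | x) = phi(theta[j] - x.w),
# so P(y = 0 | x) = phi(theta[0] - x.w) and, for j > 0,
#   P(y = j | x) = phi(theta[j] - x.w) - phi(theta[j-1] - x.w).
# f_obj below is the corresponding negative log-likelihood.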
def f_obj(w, theta, X, y):
    """Objective function (negative log-likelihood), done with care."""
    loss = 0.
    unique_y = np.sort(np.unique(y))
    for i in range(X.shape[0]):
        if y[i] == unique_y[0]:
            # lowest class: only the upper cumulative term appears
            loss -= np.log(phi(theta[y[i]] - X[i].dot(w)))
        else:
            loss -= np.log(phi(theta[y[i]] - X[i].dot(w))
                           - phi(theta[y[i] - 1] - X[i].dot(w)))

    return loss


def f_grad_w(w, theta, X, y):
    """Gradient of f_obj with respect to w."""
    unique_y = np.sort(np.unique(y))
    grad = np.zeros(w.size)
    for i in range(X.shape[0]):
        if y[i] == unique_y[0]:
            grad += X[i] * (1 - phi(theta[y[i]] - X[i].dot(w)))
        else:
            grad += X[i] * (1 - phi(theta[y[i]] - X[i].dot(w))
                            - phi(theta[y[i] - 1] - X[i].dot(w)))

    return grad
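
# Why the multiplier (1 - phi(s) - phi(t)) in f_grad_w is correct (a short
# derivation, not part of the original paste): with s = theta[y] - x.w and
# t = theta[y-1] - x.w, the per-sample loss is -log(phi(s) - phi(t)), and
#   d/dw[-log(phi(s) - phi(t))] = x * (phi'(s) - phi'(t)) / (phi(s) - phi(t)),
# where phi'(u) = phi(u) * (1 - phi(u)). Writing a = phi(s) and b = phi(t),
#   (a*(1 - a) - b*(1 - b)) / (a - b) = 1 - a - b,
# which is exactly the factor used above; the lowest-class branch follows the
# same way with the lower term dropped, giving x * (1 - phi(s)).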


if __name__ == '__main__':
    n_samples, n_features = 100, 10
    X = np.random.randn(n_samples, n_features)
    w0 = np.random.randn(n_features)
    y = np.arange(n_samples) // 20          # 5 ordered classes, 20 samples each
    theta = np.arange(np.unique(y).size)    # one threshold per class

    from scipy import optimize
    # check_grad compares f_grad_w to a finite-difference gradient of f_obj
    print('Output of check_grad: %s'
          % optimize.check_grad(f_obj, f_grad_w, w0, theta, X, y))
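
    # Follow-up sketch (not in the original paste): check_grad returns the
    # 2-norm of the difference between f_grad_w and a finite-difference
    # approximation of the gradient of f_obj, so a value around 1e-6 or
    # smaller means the analytic gradient matches. approx_fprime exposes that
    # finite-difference gradient directly, which helps spot a bad coordinate
    # if the check ever fails.
    eps = np.sqrt(np.finfo(float).eps)
    grad_fd = optimize.approx_fprime(w0, f_obj, eps, theta, X, y)
    grad_an = f_grad_w(w0, theta, X, y)
    print('Max elementwise difference: %s' % np.max(np.abs(grad_fd - grad_an)))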