- """
- Check that the gradient of the logistic regression is correct
- """
- import numpy as np
- BIG = 1e12


def phi(t):
    """
    logistic function, returns 1 / (1 + exp(-t))
    """
    return 1. / (1 + np.exp(-t))


def f_obj(w, theta, X, y):
    """
    Objective function (negative log-likelihood), computed with care
    sample by sample.
    """
    loss = 0.
    unique_y = np.sort(np.unique(y))
    for i in range(X.shape[0]):
        if y[i] == unique_y[0]:
            loss -= np.log(phi(theta[y[i]] - X[i].dot(w)))
        else:
            loss -= np.log(phi(theta[y[i]] - X[i].dot(w))
                           - phi(theta[y[i] - 1] - X[i].dot(w)))
    return loss
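
# Note: f_obj reads as the negative log-likelihood of an ordinal
# (cumulative-link) logistic model. With increasing thresholds
# theta[0] < theta[1] < ..., each class j has probability
#     P(y = j) = phi(theta[j] - x . w) - phi(theta[j - 1] - x . w),
# and for the lowest class the second term vanishes (the implicit lower
# threshold is -infinity). The loop above sums -log P(y[i]) over samples.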


def f_grad_w(w, theta, X, y):
    """Gradient of f_obj with respect to w"""
    unique_y = np.sort(np.unique(y))
    grad = np.zeros(w.size)
    for i in range(X.shape[0]):
        if y[i] == unique_y[0]:
            grad += X[i] * (1 - phi(theta[y[i]] - X[i].dot(w)))
        else:
            grad += X[i] * (1 - phi(theta[y[i]] - X[i].dot(w))
                            - phi(theta[y[i] - 1] - X[i].dot(w)))
    return grad
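
# Why the gradient has this form (a sketch): write t1 = theta[j] - x . w
# and t2 = theta[j - 1] - x . w. Since phi'(t) = phi(t) * (1 - phi(t)),
#     d/dw [-log(phi(t1) - phi(t2))]
#         = x * (phi'(t1) - phi'(t2)) / (phi(t1) - phi(t2))
#         = x * (1 - phi(t1) - phi(t2)),
# using the factorization
#     phi(t1) * (1 - phi(t1)) - phi(t2) * (1 - phi(t2))
#         = (phi(t1) - phi(t2)) * (1 - phi(t1) - phi(t2)).
# For the lowest class the t2 term drops out, giving x * (1 - phi(t1)).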


if __name__ == '__main__':
    n_samples, n_features = 100, 10
    X = np.random.randn(n_samples, n_features)
    w0 = np.random.randn(n_features)
    # 5 ordered classes (0..4), 20 samples each
    y = np.arange(n_samples) // 20
    theta = np.arange(np.unique(y).size)
    # check_grad returns the 2-norm of the difference between the analytical
    # gradient and a finite-difference approximation; a value that is small
    # relative to the gradient's norm means the gradient is correct
    print('Output of check_grad: %s'
          % optimize.check_grad(f_obj, f_grad_w, w0, theta, X, y))
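
    # A minimal cross-check by hand (a sketch): approx_fprime builds the same
    # finite-difference gradient that check_grad compares against internally.
    eps = np.sqrt(np.finfo(float).eps)
    num_grad = optimize.approx_fprime(w0, f_obj, eps, theta, X, y)
    print('Max abs difference: %s'
          % np.abs(num_grad - f_grad_w(w0, theta, X, y)).max())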