import numpy as np

##
#   LOGISTIC REGRESSION
##
def sigmoid(x):
    """Apply a numerically stable sigmoid function elementwise on x."""
    # Split on the sign of x so np.exp never receives a large positive
    # argument (which would overflow); both branches equal 1 / (1 + e^-x).
    x = np.asarray(x, dtype=float)
    out = np.empty_like(x)
    pos = x >= 0
    out[pos] = 1.0 / (1.0 + np.exp(-x[pos]))
    z = np.exp(x[~pos])
    out[~pos] = z / (1.0 + z)
    return out

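# The trainers below report a loss via calculate_loss_by_likelyhood, which the
# original paste calls but never defines (it also references an undefined
# compute_loss). A minimal sketch, assuming labels y in {0, 1}:
def calculate_loss_by_likelyhood(y, tx, w):
    """Negative log-likelihood loss: sum of log(1 + e^{xw}) - y * xw."""
    xw = np.dot(tx, w)
    # np.logaddexp(0, xw) = log(1 + exp(xw)), computed without overflow
    return np.sum(np.logaddexp(0, xw) - y * xw)
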
def calculate_logistic_gradient(y, tx, w):
    """Compute the gradient of the logistic loss: tx.T @ (sigmoid(tx @ w) - y)."""
    sig = sigmoid(np.dot(tx, w))
    return np.dot(tx.T, sig - y)

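# Illustrative sanity check (not in the original paste): the analytic gradient
# above should agree with central finite differences of the loss.
def check_logistic_gradient(y, tx, w, eps=1e-6):
    """Return the max abs difference between analytic and numeric gradients."""
    analytic = calculate_logistic_gradient(y, tx, w)
    numeric = np.zeros_like(w)
    for i in range(w.shape[0]):
        e = np.zeros_like(w)
        e[i] = eps
        numeric[i] = (calculate_loss_by_likelyhood(y, tx, w + e)
                      - calculate_loss_by_likelyhood(y, tx, w - e)) / (2 * eps)
    return np.max(np.abs(analytic - numeric))
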
def logistic_regression(y, tx, gamma, max_iter):
    """
    Logistic regression using gradient descent.
    Return the loss and the final w.
    """
    # start the logistic regression from the zero vector
    w = np.zeros((tx.shape[1], 1))
    for n_iter in range(max_iter):
        # compute the gradient and step against it with learning rate gamma
        gradient = calculate_logistic_gradient(y, tx, w)
        w = w - gamma * gradient
    loss = calculate_loss_by_likelyhood(y, tx, w)
    return loss, w

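# Example call (hyperparameter values are placeholders, not from the paste):
#   loss, w = logistic_regression(y, tx, gamma=0.01, max_iter=1000)
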
def calculate_hessian(y, tx, w):
    """Return the Hessian of the logistic loss, tx.T @ S @ tx,
    where S is diagonal with S_nn = sigma_n * (1 - sigma_n)."""
    sig = sigmoid(np.dot(tx, w))
    s_nn = sig * (1 - sig)
    S = np.diag(np.ndarray.flatten(s_nn))
    return np.dot(tx.T, np.dot(S, tx))

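# Aside (not in the original paste): np.diag materializes a dense n-by-n
# matrix. For large n, broadcasting yields the same Hessian far more cheaply:
def calculate_hessian_broadcast(y, tx, w):
    """Equivalent to calculate_hessian, without building the n x n matrix S."""
    sig = sigmoid(np.dot(tx, w))
    s_nn = sig * (1 - sig)            # shape (n, 1)
    return np.dot(tx.T, tx * s_nn)    # scales each row of tx by its S_nn
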
def penalized_logistic_regression(y, tx, w, lambd):
    """Return the gradient and Hessian of the L2-penalized logistic loss."""
    gradient = calculate_logistic_gradient(y, tx, w)
    hessian = calculate_hessian(y, tx, w)
    # the penalty lambd * ||w||^2 adds 2 * lambd * w to the gradient
    # and 2 * lambd * I to the Hessian
    gradient_penalty = lambd * 2 * w
    hessian_penalty = 2 * lambd * np.eye(tx.shape[1])
    return gradient + gradient_penalty, hessian + hessian_penalty

def reg_logistic_regression(y, tx, lambd, gamma, max_iters):
    """
    Penalized logistic regression using Newton's method
    (each step is preconditioned by the inverse Hessian).
    Return the loss and the final w.
    """
    w = np.zeros((tx.shape[1], 1))
    for n_iter in range(max_iters):
        gradient, hessian = penalized_logistic_regression(y, tx, w, lambd)
        # Newton update: solve H d = g rather than explicitly inverting H,
        # which is cheaper and numerically more stable
        w = w - gamma * np.linalg.solve(hessian, gradient)
    loss_penalty = lambd * np.sum(np.power(w, 2))
    loss = calculate_loss_by_likelyhood(y, tx, w) + loss_penalty
    return loss, w
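
# Minimal smoke test (not in the original paste; data and hyperparameters are
# synthetic placeholders, with labels in {0, 1} as the loss above assumes):
if __name__ == "__main__":
    np.random.seed(0)
    n = 200
    tx = np.hstack((np.ones((n, 1)), np.random.randn(n, 2)))
    w_true = np.array([[0.5], [-1.0], [2.0]])
    y = (sigmoid(np.dot(tx, w_true)) > np.random.rand(n, 1)).astype(float)
    loss_gd, w_gd = logistic_regression(y, tx, gamma=0.01, max_iter=1000)
    loss_nt, w_nt = reg_logistic_regression(y, tx, lambd=0.1, gamma=1.0, max_iters=20)
    print("GD loss:", loss_gd, "Newton loss:", loss_nt)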