# Requires NumPy plus the course-provided scaffold: the Classifier base
# class (which supplies resetparams) and the utils module (sigmoid,
# dsigmoid, l1/dl1, l2/dl2).
import numpy as np

import utils


class LogitReg(Classifier):

    def __init__(self, parameters={}):
        # Default: no regularization
        self.params = {'regwgt': 0.0, 'regularizer': 'None'}
        self.reset(parameters)
        self.transfer = utils.sigmoid
        self.dtransfer = utils.dsigmoid

    def reset(self, parameters):
        self.resetparams(parameters)
        self.weights = None
        # Compare strings with ==, not 'is' (identity), which is not
        # guaranteed to hold for string literals.
        if self.params['regularizer'] == 'l1':
            self.regularizer = (utils.l1, utils.dl1)
        elif self.params['regularizer'] == 'l2':
            self.regularizer = (utils.l2, utils.dl2)
        else:
            self.regularizer = (lambda w: 0, lambda w: np.zeros(w.shape))
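
    # The (penalty, penalty-gradient) pairs above are assumed to follow the
    # usual convention: utils.l1/utils.dl1 as R(w) = ||w||_1 with subgradient
    # sign(w), and utils.l2/utils.dl2 as the squared l2 norm with gradient
    # proportional to w; the exact scaling is whatever the course utils use.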

    def logit_cost(self, theta, X, y):
        """
        Compute cost for logistic regression using theta as the parameters.
        """
        cost = 0.0
        ### YOUR CODE HERE
        h = self.transfer(np.dot(X, theta))
        # Clip so the logs below stay finite when the sigmoid saturates.
        h = np.clip(h, 1e-12, 1 - 1e-12)
        # Penalize theta (the parameters being evaluated), not self.weights.
        cost = (-np.dot(y, np.log(h)) - np.dot(1 - y, np.log(1 - h))
                + self.params['regwgt'] * self.regularizer[0](theta))
        ### END YOUR CODE
        return cost
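
    # For reference, the cost above is the negative Bernoulli log-likelihood
    # plus the penalty term:
    #   J(theta) = -sum_i [y_i log h_i + (1 - y_i) log(1 - h_i)]
    #              + regwgt * R(theta),  where h_i = sigmoid(x_i . theta).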

    def logit_cost_grad(self, theta, X, y):
        """
        Compute gradients of the cost with respect to theta.
        """
        grad = np.zeros(len(theta))

        ### YOUR CODE HERE
        h = self.transfer(np.dot(X, theta))
        # Clip so the divisions below stay finite when the sigmoid saturates.
        h = np.clip(h, 1e-12, 1 - 1e-12)
        # Chain rule: d(cost)/dh, times dsigmoid, times the input X. The
        # regularization gradient is added here (not scaled by X) so that
        # learn can apply the returned gradient directly.
        ll = -(y / h) + (1 - y) / (1 - h)
        grad = (ll * self.dtransfer(np.dot(X, theta)) * X
                + self.params['regwgt'] * self.regularizer[1](theta))
        ### END YOUR CODE
        return grad
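
    # For reference: since dsigmoid(z) = h(1 - h), the product
    # ll * dsigmoid(z) simplifies to (h - y), so the per-sample gradient is
    # the familiar (h - y) * x + regwgt * dR(theta).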

    def learn(self, Xtrain, ytrain):
        """
        Learn the weights using the training data
        """
        ### YOUR CODE HERE
        # Stochastic gradient descent: shuffle the samples each epoch and
        # take per-sample steps with a step size that decays as 1 / (i + 1).
        stepsize = 0.05
        epochs = 1000
        w = np.zeros(Xtrain.shape[1])
        y = np.array(ytrain, ndmin=2)
        data = np.concatenate((Xtrain, y.T), axis=1)
        for i in range(epochs):
            np.random.shuffle(data)
            for j in range(Xtrain.shape[0]):
                x = data[j, :-1]
                yj = data[j, -1]
                w = w - (stepsize / (i + 1)) * self.logit_cost_grad(w, x, yj)
        self.weights = w
        ### END YOUR CODE
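
    # A vectorized batch-gradient alternative, sketched for comparison under
    # the same assumptions (full-batch updates instead of per-sample steps):
    #
    #     for i in range(epochs):
    #         h = self.transfer(np.dot(Xtrain, w))
    #         grad = (np.dot(Xtrain.T, h - ytrain) / Xtrain.shape[0]
    #                 + self.params['regwgt'] * self.regularizer[1](w))
    #         w = w - stepsize * grad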

    def predict(self, Xtest):
        """
        Use the parameters computed in self.learn to give predictions on new
        observations.
        """
        ytest = np.zeros(Xtest.shape[0], dtype=int)

        ### YOUR CODE HERE
        for i in range(Xtest.shape[0]):
            # Predict class 1 when the estimated P(y = 1 | x) is at least 0.5.
            p_y1 = self.transfer(np.dot(self.weights, Xtest[i]))
            if p_y1 >= 0.5:
                ytest[i] = 1
        ### END YOUR CODE

        assert len(ytest) == Xtest.shape[0]
        return ytest
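

# A minimal smoke test, assuming the course scaffold (the Classifier base
# class with resetparams, and the utils helpers) is importable; the data and
# names below are illustrative only, not part of the assignment.
if __name__ == '__main__':
    np.random.seed(0)
    Xtrain = np.random.randn(200, 3)
    true_w = np.array([1.5, -2.0, 0.5])
    ytrain = (np.dot(Xtrain, true_w) > 0).astype(float)
    learner = LogitReg()
    learner.learn(Xtrain, ytrain)
    predictions = learner.predict(Xtrain)
    print('Training accuracy:', np.mean(predictions == ytrain))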