import util
import numpy as np
import matplotlib.pyplot as plt

np.seterr(all='raise')


factor = 2.0


class LinearModel(object):
    """Base class for linear models."""

    def __init__(self, beta=0):
        """
        Args:
            theta: Weights vector for the model.
            beta: Regularization coefficient for the model.
        """
        self.theta = None
        self.beta = beta

    def fit(self, x, y):
        """Run solver to fit linear model. You have to update the value of
        self.theta using the normal equations.

        Args:
            x: Training example inputs. Shape (n, d).
            y: Training example labels. Shape (n,).
        """
        # *** START CODE HERE ***
        # Normal equations: theta = (X^T X)^{-1} X^T y
        self.theta = np.linalg.inv(x.T @ x) @ x.T @ y
        # *** END CODE HERE ***
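
    # Added note (not part of the starter code): the line above solves the normal
    # equations theta = (X^T X)^{-1} X^T y by explicit inversion. Assuming X^T X is
    # invertible, an equivalent and more numerically stable form would be:
    #     self.theta = np.linalg.solve(x.T @ x, x.T @ y)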

    def create_poly(self, k, x):
        """
        Generates a polynomial feature map using the data x.
        The polynomial map should have powers from 0 to k.
        Output should be a numpy array whose shape is (n, k+1).

        Args:
            x: Training example inputs. Shape (n, 2).
        """
        # *** START CODE HERE ***
        n = x.shape[0]
        output = np.zeros([n, k + 1])
        # Column i holds the i-th power of the input feature x[:, 1].
        for i in range(k + 1):
            output[:, i] = np.power(x[:, 1], i)
        return output
        # *** END CODE HERE ***
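
    # Worked example (illustrative, not part of the starter code): for a single
    # intercept-augmented row x = [[1.0, 2.0]] and k = 3, create_poly returns
    # [[1., 2., 4., 8.]], i.e. the powers 2**0 through 2**3 of the second column.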

    def create_sin(self, k, x):
        """
        Adds a sin feature map to the data x.
        Output should be a numpy array whose shape is (m, p+1).

        Args:
            x: Training example inputs. Shape (m, p).
        """
        # *** START CODE HERE ***
        # Copy x and append sin(x[:, 1]) as the last column.
        m = x.shape[0]
        p = x.shape[1]
        output = np.zeros([m, p + 1])
        output[:, :-1] = x
        output[:, -1] = np.sin(x[:, 1])
        return output
        # *** END CODE HERE ***
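
    # Worked example (illustrative, not part of the starter code): applied to the
    # polynomial row [[1., 2., 4., 8.]], create_sin copies those four columns and
    # appends sin(2.0) ≈ 0.909, producing a row of shape (1, 5).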

    def predict(self, x):
        """
        Make a prediction given new inputs x.
        Returns the numpy array of the predictions.

        Args:
            x: Inputs of shape (n, d).

        Returns:
            Outputs of shape (n,).
        """
        # *** START CODE HERE ***
        return x @ self.theta
        # *** END CODE HERE ***
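
    # Added shape note: with x of shape (n, d) and self.theta of shape (d,), the
    # matrix-vector product x @ self.theta yields one prediction per row, shape (n,).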

    def mse(self, y_true, y_pred):
        """
        Evaluate accuracy of predictions.
        Returns the mean square error of the predictions.

        Args:
            y_true: Inputs of shape (n,).
            y_pred: Inputs of shape (n,).

        Returns:
            Outputs of shape (1)
        """
        # *** START CODE HERE ***
        # Average of the squared errors.
        return np.square(y_true - y_pred).mean()
        # *** END CODE HERE ***
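
    # Worked example (illustrative, not part of the starter code):
    # mse(np.array([1., 2., 3.]), np.array([1., 2., 4.])) averages the squared
    # errors [0., 0., 1.] and returns 1/3 ≈ 0.333.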


def run_exp(train_path, sine=False, ks=[1, 2, 3, 5, 10, 20], filename='plot.png'):
    train_x, train_y = util.load_dataset(train_path, add_intercept=True)
    plot_x = np.ones([1000, 2])
    plot_x[:, 1] = np.linspace(-factor * np.pi, factor * np.pi, 1000)
    plt.scatter(train_x[:, 1], train_y)

    for k in ks:
        '''
        Our objective is to train models and perform predictions on plot_x data
        '''
        # *** START CODE HERE ***
        # Fit a model on the polynomial (and optionally sine) feature map of the
        # training data loaded above.
        model = LinearModel()
        new_xtrain = model.create_poly(k, train_x)
        if sine:
            new_xtrain = model.create_sin(k, new_xtrain)
        model.fit(new_xtrain, train_y)
        # *** END CODE HERE ***
        '''
        Here plot_y are the predictions of the linear model on the plot_x data
        '''
        x_plot = model.create_poly(k, plot_x)
        if sine:
            x_plot = model.create_sin(k, x_plot)
        plot_y = model.predict(x_plot)

        plt.ylim(-2, 2)
        plt.plot(plot_x[:, 1], plot_y, label='k=%d' % k)

    plt.legend()
    plt.savefig(filename)
    plt.clf()
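

# Usage sketch (illustrative; 'train.csv' is a hypothetical dataset path):
#     run_exp('train.csv', sine=True, ks=[1, 3, 10], filename='sine_fits.png')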


def main(train_path, small_path, eval_path):
    '''
    Run all experiments
    '''
    # *** START CODE HERE ***
    # run_exp(train_path, ks=[3], filename='part_b.png')
    # run_exp(train_path, ks=[3, 5, 10, 20], filename='part_c.png')
    # run_exp(train_path, sine=True, ks=[1, 2, 3, 5, 10, 20], filename='part_d.png')
    run_exp(train_path, sine=True, ks=[12], filename='20.png')
    # run_exp(small_path, sine=True, ks=[1, 2, 5, 10, 20], filename='part_e.png')
    # ADD ZERO LATER
    # *** END CODE HERE ***
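

# Minimal self-contained sanity check (an added sketch, not part of the assignment
# starter code). It does not call util.load_dataset, so no dataset files are needed:
# it fits a degree-3 polynomial feature map to noisy sin(x) samples and prints the
# training MSE.
if __name__ == '__main__':
    rng = np.random.default_rng(0)
    x_demo = np.ones([50, 2])
    x_demo[:, 1] = np.linspace(-factor * np.pi, factor * np.pi, 50)
    y_demo = np.sin(x_demo[:, 1]) + 0.1 * rng.standard_normal(50)

    demo_model = LinearModel()
    phi = demo_model.create_poly(3, x_demo)
    demo_model.fit(phi, y_demo)
    print('demo mse =', demo_model.mse(y_demo, demo_model.predict(phi)))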