Optional Lab: Gradient Descent for Linear Regression

import math, copy
import numpy as np
import matplotlib.pyplot as plt
plt.style.use('./deeplearning.mplstyle')
from lab_utils_uni import plt_house_x, plt_contour_wgrad, plt_divergence, plt_gradients

# Load our data set
x_train = np.array([1.0, 2.0])       # features (size in 1000s of sqft)
y_train = np.array([300.0, 500.0])   # target values (price in 1000s of dollars)

# Function to calculate the cost
def compute_cost(x, y, w, b):
    """
    Computes the cost of using w,b as parameters for linear regression
    to fit the data points in x and y.
    """
    m = x.shape[0]
    cost = 0

    for i in range(m):
        f_wb = w * x[i] + b                # model prediction f_wb = w*x + b
        cost = cost + (f_wb - y[i]) ** 2   # accumulate squared error
    total_cost = 1 / (2 * m) * cost        # average over m examples, halved

    return total_cost


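# A minimal vectorized sketch of the same cost computation, not part of the original
# lab; it assumes the NumPy import above and 1-D arrays x and y of equal length.
# The name compute_cost_vec is hypothetical.
def compute_cost_vec(x, y, w, b):
    f_wb = w * x + b                       # predictions for all m examples at once
    return np.mean((f_wb - y) ** 2) / 2    # (1/(2m)) * sum of squared errors

# Example check against the loop version (should agree to floating-point precision):
# compute_cost_vec(x_train, y_train, 200, 100) == compute_cost(x_train, y_train, 200, 100)

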
def compute_gradient(x, y, w, b):
    """
    Computes the gradient for linear regression
    Args:
      x (ndarray (m,)): Data, m examples
      y (ndarray (m,)): target values
      w,b (scalar)    : model parameters
    Returns:
      dj_dw (scalar): The gradient of the cost w.r.t. the parameter w
      dj_db (scalar): The gradient of the cost w.r.t. the parameter b
    """

    # Number of training examples
    m = x.shape[0]
    dj_dw = 0
    dj_db = 0

    for i in range(m):
        f_wb = w * x[i] + b               # prediction for example i
        dj_dw_i = (f_wb - y[i]) * x[i]    # contribution to dJ/dw
        dj_db_i = f_wb - y[i]             # contribution to dJ/db
        dj_db += dj_db_i
        dj_dw += dj_dw_i
    dj_dw = dj_dw / m
    dj_db = dj_db / m

    return dj_dw, dj_db


plt_gradients(x_train, y_train, compute_cost, compute_gradient)
plt.show()


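# A vectorized sketch of the same gradient computation, not part of the original lab;
# it assumes the NumPy import above and 1-D arrays x and y. The name
# compute_gradient_vec is hypothetical.
def compute_gradient_vec(x, y, w, b):
    err = (w * x + b) - y          # prediction error for every example
    dj_dw = np.mean(err * x)       # (1/m) * sum(err_i * x_i)
    dj_db = np.mean(err)           # (1/m) * sum(err_i)
    return dj_dw, dj_db

# This could be passed to gradient_descent below in place of compute_gradient,
# since it takes the same arguments and returns the same values.

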
def gradient_descent(x, y, w_in, b_in, alpha, num_iters, cost_function, gradient_function):
    """
    Performs gradient descent to fit w,b. Updates w,b by taking
    num_iters gradient steps with learning rate alpha

    Args:
      x (ndarray (m,))  : Data, m examples
      y (ndarray (m,))  : target values
      w_in,b_in (scalar): initial values of model parameters
      alpha (float)     : learning rate
      num_iters (int)   : number of iterations to run gradient descent
      cost_function     : function to call to produce cost
      gradient_function : function to call to produce gradient

    Returns:
      w (scalar): Updated value of parameter after running gradient descent
      b (scalar): Updated value of parameter after running gradient descent
      J_history (list): History of cost values
      p_history (list): History of parameters [w,b]
    """

    # Work on local copies so the caller's w_in, b_in are not modified
    w = copy.deepcopy(w_in)
    b = b_in
    # Lists to store cost J and parameters at each iteration, primarily for graphing later
    J_history = []
    p_history = []

    for i in range(num_iters):
        # Calculate the gradient at the current parameters using gradient_function
        dj_dw, dj_db = gradient_function(x, y, w, b)

        # Update the parameters simultaneously using the gradients
        b = b - alpha * dj_db
        w = w - alpha * dj_dw

        # Save cost J at each iteration
        if i < 100000:  # prevent resource exhaustion
            J_history.append(cost_function(x, y, w, b))
            p_history.append([w, b])
        # Print cost at intervals: 10 times in total, or on every iteration if num_iters < 10
        if i % math.ceil(num_iters / 10) == 0:
            print(f"Iteration {i:4}: Cost {J_history[-1]:0.2e} ",
                  f"dj_dw: {dj_dw: 0.3e}, dj_db: {dj_db: 0.3e}  ",
                  f"w: {w: 0.3e}, b:{b: 0.5e}")

    return w, b, J_history, p_history  # return w, b and the histories for graphing


# initialize parameters
w_init = 0
b_init = 0
# some gradient descent settings
iterations = 10000
tmp_alpha = 1.0e-2
# run gradient descent
w_final, b_final, J_hist, p_hist = gradient_descent(x_train, y_train, w_init, b_init, tmp_alpha,
                                                    iterations, compute_cost, compute_gradient)
print(f"(w,b) found by gradient descent: ({w_final:8.4f},{b_final:8.4f})")

# plot cost versus iteration
fig, (ax1, ax2) = plt.subplots(1, 2, constrained_layout=True, figsize=(12, 4))
ax1.plot(J_hist[:100])
ax2.plot(1000 + np.arange(len(J_hist[1000:])), J_hist[1000:])
ax1.set_title("Cost vs. iteration (start)");  ax2.set_title("Cost vs. iteration (end)")
ax1.set_ylabel('Cost')            ;  ax2.set_ylabel('Cost')
ax1.set_xlabel('iteration step')  ;  ax2.set_xlabel('iteration step')
plt.show()

# predictions with the learned parameters (x is in 1000s of sqft, price in 1000s of dollars)
print(f"1000 sqft house prediction {w_final*1.0 + b_final:0.1f} Thousand dollars")
print(f"1200 sqft house prediction {w_final*1.2 + b_final:0.1f} Thousand dollars")
print(f"2000 sqft house prediction {w_final*2.0 + b_final:0.1f} Thousand dollars")
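
# A small sketch (not part of the original lab) that makes the same predictions with a
# single vectorized expression; x_sizes is a hypothetical name, with sizes in 1000s of
# sqft as above.
x_sizes = np.array([1.0, 1.2, 2.0])
predictions = w_final * x_sizes + b_final
for size, price in zip(x_sizes, predictions):
    print(f"{size*1000:.0f} sqft house prediction {price:0.1f} Thousand dollars")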

# plot the cost contour with the gradient descent path
fig, ax = plt.subplots(1, 1, figsize=(12, 6))
plt_contour_wgrad(x_train, y_train, p_hist, ax)
# zoomed-in view of the same contour near the minimum
fig, ax = plt.subplots(1, 1, figsize=(12, 4))
plt_contour_wgrad(x_train, y_train, p_hist, ax, w_range=[180, 220, 0.5], b_range=[80, 120, 0.5],
                  contours=[1, 5, 10, 20], resolution=0.5)

# initialize parameters
w_init = 0
b_init = 0
# set alpha to a large value so gradient descent diverges
iterations = 10
tmp_alpha = 8.0e-1
# run gradient descent
w_final, b_final, J_hist, p_hist = gradient_descent(x_train, y_train, w_init, b_init, tmp_alpha,
                                                    iterations, compute_cost, compute_gradient)
plt_divergence(p_hist, J_hist, x_train, y_train)
plt.show()
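
# A sketch (not part of the original lab) of an early-stopping variant: stop when the
# cost improves by less than a tolerance instead of always running a fixed number of
# steps. The names gradient_descent_tol and tol are hypothetical; it reuses the
# compute_cost and compute_gradient functions defined above.
def gradient_descent_tol(x, y, w_in, b_in, alpha, max_iters, tol=1e-9):
    w, b = w_in, b_in
    prev_cost = compute_cost(x, y, w, b)
    for i in range(max_iters):
        dj_dw, dj_db = compute_gradient(x, y, w, b)
        w = w - alpha * dj_dw
        b = b - alpha * dj_db
        cost = compute_cost(x, y, w, b)
        if abs(prev_cost - cost) < tol:    # converged: cost barely changing
            break
        prev_cost = cost
    return w, b

# Example usage with the small learning rate from the first run:
# w_tol, b_tol = gradient_descent_tol(x_train, y_train, 0, 0, 1.0e-2, 10000)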