julianzhang

Optional Lab: Feature Engineering and Polynomial Regression

Aug 10th, 2022

import numpy as np
import matplotlib.pyplot as plt
from lab_utils_multi import zscore_normalize_features, run_gradient_descent_feng
np.set_printoptions(precision=2)  # reduce display precision on numpy arrays
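# NOTE: lab_utils_multi ships with the course notebook. If it is not on your
# path, comment out the import above and use the stand-ins below. This is a
# minimal sketch under the assumption that the helpers perform z-score scaling
# and plain batch gradient descent; the course versions also print cost while
# training.
#
# def zscore_normalize_features(X):
#     """Scale each column of X to zero mean and unit standard deviation."""
#     return (X - np.mean(X, axis=0)) / np.std(X, axis=0)
#
# def run_gradient_descent_feng(X, y, iterations=1000, alpha=1e-2):
#     """Batch gradient descent for y ~ X @ w + b; returns (w, b)."""
#     m, n = X.shape
#     w, b = np.zeros(n), 0.0
#     for _ in range(iterations):
#         err = X @ w + b - y                # prediction error, shape (m,)
#         w -= alpha * (X.T @ err) / m       # gradient of cost w.r.t. w
#         b -= alpha * np.sum(err) / m       # gradient of cost w.r.t. b
#     return w, b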
# create target data: a pure quadratic, y = 1 + x**2
x = np.arange(0, 20, 1)
y = 1 + x**2
X = x.reshape(-1, 1)  # X must be a 2-D matrix for the gradient descent routine

# fit a linear model to the raw feature x (no feature engineering)
model_w, model_b = run_gradient_descent_feng(X, y, iterations=1000, alpha=1e-2)

plt.scatter(x, y, marker='x', c='r', label="Actual Value"); plt.title("no feature engineering")
plt.plot(x, X @ model_w + model_b, label="Predicted Value"); plt.xlabel("x"); plt.ylabel("y"); plt.legend(); plt.show()
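# Optional check: with only the raw x feature, a straight line cannot track
# y = 1 + x**2, so the worst-case residual should be large and systematic.
print(f"w, b found by gradient descent: {model_w}, {model_b}")
print(f"max |prediction - y|: {np.max(np.abs(X @ model_w + model_b - y)):0.2f}")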
# create target data
x = np.arange(0, 20, 1)
y = 1 + x**2

# engineer features
X = x**2              # <-- added engineered feature
X = X.reshape(-1, 1)  # X should be a 2-D matrix
model_w, model_b = run_gradient_descent_feng(X, y, iterations=10000, alpha=1e-5)

plt.scatter(x, y, marker='x', c='r', label="Actual Value"); plt.title("Added x**2 feature")
plt.plot(x, np.dot(X, model_w) + model_b, label="Predicted Value"); plt.xlabel("x"); plt.ylabel("y"); plt.legend(); plt.show()
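# The model is now linear in the engineered feature x**2, so gradient descent
# should drive w close to the target value 1 (y = 1 + 1*x**2); b approaches
# its target of 1 only slowly at this small learning rate.
print(f"w, b found by gradient descent: {model_w}, {model_b}")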
# create target data
x = np.arange(0, 20, 1)
y = x**2

# engineer features
X = np.c_[x, x**2, x**3]   # <-- added engineered features: x, x**2, x**3
model_w, model_b = run_gradient_descent_feng(X, y, iterations=10000, alpha=1e-7)

plt.scatter(x, y, marker='x', c='r', label="Actual Value"); plt.title("x, x**2, x**3 features")
plt.plot(x, X @ model_w + model_b, label="Predicted Value"); plt.xlabel("x"); plt.ylabel("y"); plt.legend(); plt.show()
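# Since y is purely quadratic, gradient descent should place most of the
# weight on the x**2 column and keep the weights on x and x**3 small.
print(f"w: {model_w}, b: {model_b}")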
# create target data
x = np.arange(0, 20, 1)
y = x**2

# engineer features
X = np.c_[x, x**2, x**3]   # <-- added engineered features
X_features = ['x', 'x^2', 'x^3']

# plot each feature against the target; the most useful feature is the one
# with a linear relationship to y (see the correlation check below)
fig, ax = plt.subplots(1, 3, figsize=(12, 3), sharey=True)
for i in range(len(ax)):
    ax[i].scatter(X[:, i], y)
    ax[i].set_xlabel(X_features[i])
ax[0].set_ylabel("y")
plt.show()
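# A quick numeric confirmation of what the plots show: of the three features,
# x**2 should have the strongest linear relationship with y.
for i, name in enumerate(X_features):
    print(f"corr({name}, y) = {np.corrcoef(X[:, i], y)[0, 1]:0.3f}")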

# create target data
x = np.arange(0, 20, 1)
X = np.c_[x, x**2, x**3]
print(f"Peak to Peak range by column in Raw        X:{np.ptp(X, axis=0)}")

# apply z-score normalization to each column
X = zscore_normalize_features(X)
print(f"Peak to Peak range by column in Normalized X:{np.ptp(X, axis=0)}")
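# After z-score scaling, every column should have (near-)zero mean and unit
# standard deviation, which puts the features on comparable ranges.
print(f"per-column mean: {np.mean(X, axis=0)}, std: {np.std(X, axis=0)}")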

x = np.arange(0, 20, 1)
y = x**2

X = np.c_[x, x**2, x**3]
X = zscore_normalize_features(X)

model_w, model_b = run_gradient_descent_feng(X, y, iterations=100000, alpha=1e-1)

plt.scatter(x, y, marker='x', c='r', label="Actual Value"); plt.title("Normalized x, x**2, x**3 features")
plt.plot(x, X @ model_w + model_b, label="Predicted Value"); plt.xlabel("x"); plt.ylabel("y"); plt.legend(); plt.show()
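# Note how feature scaling changed the optimization: alpha here is 1e-1,
# versus 1e-7 on the raw cubic features above, and convergence is far faster.
print(f"w: {model_w}, b: {model_b}")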
x = np.arange(0, 20, 1)
y = np.cos(x/2)

# a richer basis: polynomial features up to x**13, then z-score scaling
X = np.c_[x, x**2, x**3, x**4, x**5, x**6, x**7, x**8, x**9, x**10, x**11, x**12, x**13]
X = zscore_normalize_features(X)

model_w, model_b = run_gradient_descent_feng(X, y, iterations=1000000, alpha=1e-1)

plt.scatter(x, y, marker='x', c='r', label="Actual Value"); plt.title("Normalized polynomial features up to x**13")
plt.plot(x, X @ model_w + model_b, label="Predicted Value"); plt.xlabel("x"); plt.ylabel("y"); plt.legend(); plt.show()
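# Even a highly non-linear target like cos(x/2) can be approximated by a model
# that is still linear in its parameters, given enough polynomial features and
# scaling. A rough fit check:
print(f"mean |prediction - y|: {np.mean(np.abs(X @ model_w + model_b - y)):0.3f}")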