# Re-iterate the linear regression model, this time creating a list of features
# and their corresponding coefficients.
import pandas as pd
from sklearn.linear_model import LinearRegression
from sklearn.model_selection import train_test_split
from sklearn.metrics import mean_squared_error

linreg_rmse = []
coef_list = []

# Fit the model on 10 random train/test splits, recording the test RMSE
# and the learned coefficients for each iteration.
for i in range(10):
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.15)
    regressor = LinearRegression()
    regressor.fit(X_train, y_train)
    y_pred = regressor.predict(X_test)
    rmse = mean_squared_error(y_test, y_pred) ** 0.5
    linreg_rmse.append(rmse)
    coef_list.append(regressor.coef_)

# Pair each feature name from the vectorizer (tv) with its coefficient
# from every iteration.
features = pd.DataFrame(tv.get_feature_names())
coef_list = pd.DataFrame(coef_list).transpose()
coef_features = pd.concat([features, coef_list], axis=1, sort=False)
coef_features.columns = ['Feature'] + ['Iteration {}'.format(i + 1) for i in range(10)]
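A minimal follow-up sketch of how the collected results could be summarized, assuming the block above has already run (with X, y, and the vectorizer tv defined earlier in the notebook); the names iteration_cols and top_features below are illustrative, not part of the original code.

# Sketch: summarize the 10 iterations (assumes the block above has run).
import numpy as np

# Average test RMSE across the 10 random splits.
print('Mean RMSE over 10 splits:', np.mean(linreg_rmse))

# Mean coefficient per feature across iterations, then the features with
# the largest average absolute weight.
iteration_cols = [c for c in coef_features.columns if c.startswith('Iteration')]
coef_features['Mean coefficient'] = coef_features[iteration_cols].mean(axis=1)
top_features = coef_features.reindex(
    coef_features['Mean coefficient'].abs().sort_values(ascending=False).index
).head(10)
print(top_features[['Feature', 'Mean coefficient']])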