import numpy as np
import pandas as pd
from sklearn.linear_model import Ridge
from sklearn.model_selection import KFold
from sklearn.metrics import r2_score


def stacking_model_input(models, model_name, data):
    """Fit each base model on the training split and return two DataFrames
    (test, train) whose columns are the base models' predictions."""
    X_test, y_test, X_train, y_train = data
    X_data = [X_test, X_train]
    stack_Xs = [pd.DataFrame(), pd.DataFrame()]
    for i, model in enumerate(models):
        model.fit(X_train, y_train)
        for j, x in enumerate(X_data):
            preds = model.predict(x)
            preds = preds.reshape(len(preds))  # flatten to 1-D
            stack_Xs[j][model_name[i]] = preds
    return stack_Xs[0], stack_Xs[1]  # stacked test features, stacked train features


def ensemble_stacking_R2(models, model_name, Data, alpha, k_fold=5):
    """K-fold evaluation of the stacking ensemble: base-model predictions are
    fed to a Ridge meta-learner and the per-fold R^2 scores are returned."""
    R_squares = []
    kf = KFold(n_splits=k_fold)

    for train_index, test_index in kf.split(Data[0]):  # split the features passed in, not a global X
        split_data = split_test_train(Data, (train_index, test_index))
        y_test, y_train = split_data[1], split_data[3]

        stack_X_test, stack_X_train = stacking_model_input(models, model_name, split_data)
        # print(stack_X_test, y_test)
        reg = Ridge(alpha=alpha).fit(stack_X_train, y_train)
        y_pred = reg.predict(stack_X_test)
        R_squares.append(r2_score(y_test, y_pred))
    return np.array(R_squares)
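# --- Pieces the paste assumes but does not show (hypothetical sketches, not the author's code) ---
# split_test_train is called in ensemble_stacking_R2 above but never defined in this paste;
# a minimal sketch, assuming X and Y are NumPy arrays (use .iloc instead for pandas objects)
# and that it returns the split in the order the functions above expect.
def split_test_train(Data, indices):
    X_all, Y_all = Data
    train_index, test_index = indices
    return X_all[test_index], Y_all[test_index], X_all[train_index], Y_all[train_index]


# The base models are also undefined here; default scikit-learn estimators serve as
# placeholders, since the original hyperparameters are unknown.
from sklearn.svm import SVR
from sklearn.kernel_ridge import KernelRidge
from sklearn.ensemble import RandomForestRegressor, GradientBoostingRegressor

SVR_model = SVR()
KRR_model = KernelRidge()
RFG_model = RandomForestRegressor()
GBR_model = GradientBoostingRegressor()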
# X and Y are assumed to be defined earlier (feature matrix and target vector).
Data = (X, Y)
models = [SVR_model, KRR_model, RFG_model, GBR_model]
# models = [SVR_model, KRR_model]
model_name = ['Support Vector Regression', 'Kernel Ridge Regression',
              'Random Forest Regression', 'Gradient Boosting Regression']

alphas = [10, 20, 30, 50]
for alpha in alphas:
    stacking_scores = ensemble_stacking_R2(models, model_name, Data, alpha)
    print(alpha)
    print(stacking_scores)
    print('the R2 of stacking method is %.2f with std %.2f' % (stacking_scores.mean(), stacking_scores.std()))