# XGBClassifier: grid-search the main regularization/sampling parameters,
# then refit with the best combination and evaluate with stratified K-fold CV.
from xgboost import XGBClassifier
from sklearn.model_selection import GridSearchCV

print('XGBClassifier--------------------------------------')
param_grid = {
    'max_depth': [10, 12],
    'min_child_weight': [3, 5],
    'gamma': [1, 2],
    'subsample': [0.75, 0.8],
    'colsample_bytree': [0.75, 0.8]
}
# Base estimator for the search (was commented out in the original paste,
# which left clf undefined).
clf = XGBClassifier(learning_rate=0.01)
grid_search = GridSearchCV(clf, param_grid, n_jobs=-1, cv=skf, verbose=2,
                           scoring='roc_auc').fit(projected, ytrain)
print('The best parameters are :', grid_search.best_params_)
print(grid_search.best_score_)
# Rebuild the classifier with the best parameters found by the search.
clf = XGBClassifier(learning_rate=0.01, **grid_search.best_params_)
accuracies, auc_rocs, auc_prs = train_K_Fold(projected, ytrain, clf, skf)
accuracy, auc_roc, auc_pr = get_means_std(accuracies, auc_rocs, auc_prs)
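train_K_Fold and get_means_std are not defined in this paste. Below is a minimal sketch of what they might look like, assuming projected and ytrain are NumPy arrays, skf is a StratifiedKFold instance, and the intent is to fit a fresh copy of the classifier on each fold and report the mean and standard deviation of each metric. This is an illustrative assumption, not the original author's code.

# Hypothetical helpers (not in the original paste).
import numpy as np
from sklearn.base import clone
from sklearn.metrics import accuracy_score, roc_auc_score, average_precision_score

def train_K_Fold(X, y, clf, skf):
    # Fit a clone of clf on each training fold and score the held-out fold.
    accuracies, auc_rocs, auc_prs = [], [], []
    for train_idx, test_idx in skf.split(X, y):
        model = clone(clf).fit(X[train_idx], y[train_idx])
        proba = model.predict_proba(X[test_idx])[:, 1]
        preds = (proba >= 0.5).astype(int)
        accuracies.append(accuracy_score(y[test_idx], preds))
        auc_rocs.append(roc_auc_score(y[test_idx], proba))
        auc_prs.append(average_precision_score(y[test_idx], proba))
    return accuracies, auc_rocs, auc_prs

def get_means_std(accuracies, auc_rocs, auc_prs):
    # Print and return (mean, std) for each per-fold metric list.
    results = []
    for name, values in [('accuracy', accuracies),
                         ('AUC-ROC', auc_rocs),
                         ('AUC-PR', auc_prs)]:
        mean, std = np.mean(values), np.std(values)
        print(f'{name}: {mean:.4f} +/- {std:.4f}')
        results.append((mean, std))
    return results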