Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
# XGBClassifier: hyperparameter tuning via exhaustive grid search,
# then K-fold evaluation of the tuned model.
print('XGBClassifier--------------------------------------')

# Candidate hyperparameters to search over.
param_grid = {
    'max_depth': [10, 12],
    'min_child_weight': [3, 5],
    'gamma': [1, 2],
    'subsample': [0.75, 0.8],
    'colsample_bytree': [0.75, 0.8],
}

# BUG FIX: the base estimator was commented out but `clf` is passed to
# GridSearchCV below, which raised NameError. Define it before the search.
clf = XGBClassifier(learning_rate=0.01)

# Exhaustive search scored by ROC-AUC, using the externally provided
# stratified folds (`skf`); n_jobs=-1 parallelizes across all cores.
grid_search = GridSearchCV(
    clf, param_grid, n_jobs=-1, cv=skf, verbose=2, scoring='roc_auc'
).fit(projected, ytrain)
print('The best parameters are :', grid_search.best_params_)
print(grid_search.best_score_)

# Rebuild the classifier with the best-found hyperparameters.
# best_params_ contains exactly the keys of param_grid, so unpacking it
# is equivalent to listing each keyword argument individually.
clf = XGBClassifier(learning_rate=0.01, **grid_search.best_params_)

# Cross-validated evaluation of the tuned model; helpers are defined
# elsewhere in the project (not visible in this chunk).
accuracies, auc_rocs, auc_prs = train_K_Fold(projected, ytrain, clf, skf)
accuracy, auc_roc, auc_pr = get_means_std(accuracies, auc_rocs, auc_prs)
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement