Advertisement
Not a member of Pastebin yet? Sign up — it unlocks many cool features!
# Hyperparameter search for GradientBoostingClassifier using stratified 5-fold CV.
# For each hyperparameter combination we average accuracy over all folds and keep
# the combination with the highest mean score, then refit it on the full training
# set. NOTE(review): sklearn.model_selection.GridSearchCV implements exactly this
# pattern and would replace the hand-rolled loops below.
kf = StratifiedKFold(n_splits=5, shuffle=True, random_state=RND_SEED)
top_score = 0.0        # best mean CV accuracy found so far; accuracy lies in [0, 1]
top_classifier = None  # model refit on all of X_train with the best hyperparameters

# Convert once, outside the loops. pandas removed DataFrame.as_matrix() in 1.0;
# .to_numpy() is the supported replacement, and hoisting it avoids re-copying the
# whole frame on every iteration.
X_arr = X_train.to_numpy()
y_arr = y_train.to_numpy()
# Materialize the folds once so every hyperparameter combination is evaluated on
# the same splits (a fair comparison).
folds = list(kf.split(X_arr, y_arr))

best_gbc_params = None
for n_estimators in [1, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100]:
    for max_depth in [1, 3, 10]:
        # "deviance" was renamed "log_loss" in scikit-learn >= 1.1 — adjust if upgrading.
        for loss in ["deviance", "exponential"]:
            fold_scores = []
            for train_index, test_index in folds:
                gbc = GradientBoostingClassifier(
                    n_estimators=n_estimators, max_depth=max_depth, loss=loss
                )
                gbc.fit(X_arr[train_index], y_arr[train_index])
                fold_scores.append(gbc.score(X_arr[test_index], y_arr[test_index]))
            mean_score = sum(fold_scores) / len(fold_scores)
            # BUG FIX: the original initialized top_score = 10 and compared with "<",
            # which keeps the classifier with the *lowest* accuracy. Higher accuracy
            # is better, so compare with ">".
            if mean_score > top_score:
                top_score = mean_score
                best_gbc_params = {
                    "n_estimators": n_estimators,
                    "max_depth": max_depth,
                    "loss": loss,
                }

# Refit the winning configuration once on the full training set.
if best_gbc_params is not None:
    top_classifier = GradientBoostingClassifier(**best_gbc_params).fit(X_arr, y_arr)
from sklearn.ensemble import AdaBoostClassifier

# Hyperparameter search for AdaBoostClassifier using stratified 5-fold CV.
# Mirrors the GradientBoostingClassifier search: average accuracy over all folds
# per n_estimators value, keep the best, then refit on the full training set.
kf = StratifiedKFold(n_splits=5, shuffle=True, random_state=RND_SEED)
top_score = 0.0        # best mean CV accuracy found so far; accuracy lies in [0, 1]
top_classifier = None  # model refit on all of X_train with the best n_estimators

# Convert once, outside the loops (DataFrame.as_matrix() was removed in pandas 1.0;
# .to_numpy() is the replacement) and reuse the same folds for every candidate.
X_arr = X_train.to_numpy()
y_arr = y_train.to_numpy()
folds = list(kf.split(X_arr, y_arr))

best_n_estimators = None
for n_estimators in [1, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100]:
    fold_scores = []
    for train_index, test_index in folds:
        adb = AdaBoostClassifier(n_estimators=n_estimators)
        adb.fit(X_arr[train_index], y_arr[train_index])
        fold_scores.append(adb.score(X_arr[test_index], y_arr[test_index]))
    mean_score = sum(fold_scores) / len(fold_scores)
    # BUG FIX: the original initialized top_score = 10 and compared with "<",
    # which keeps the *worst* classifier. Higher accuracy is better: use ">".
    if mean_score > top_score:
        top_score = mean_score
        best_n_estimators = n_estimators

# Refit the winning configuration once on the full training set.
if best_n_estimators is not None:
    top_classifier = AdaBoostClassifier(n_estimators=best_n_estimators).fit(X_arr, y_arr)
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement