# Assumes X_train, y_train (pandas objects) and RND_SEED are defined earlier.
from sklearn.model_selection import StratifiedKFold
from sklearn.ensemble import GradientBoostingClassifier

kf = StratifiedKFold(n_splits=5, shuffle=True, random_state=RND_SEED)
top_score = 0            # accuracy lies in [0, 1], so start below any possible score
top_classifier = None

# from sklearn.model_selection import GridSearchCV  # (sklearn.grid_search is deprecated)
for train_index, test_index in kf.split(X_train, y_train):
    for n_estimators in [1, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100]:
        for max_depth in [1, 3, 10]:
            for loss in ["deviance", "exponential"]:
                gbc = GradientBoostingClassifier(n_estimators=n_estimators, max_depth=max_depth, loss=loss)
                # .values replaces the removed DataFrame.as_matrix()
                X_train2, X_test2 = X_train.values[train_index], X_train.values[test_index]
                y_train2, y_test2 = y_train.values[train_index], y_train.values[test_index]
                gbc.fit(X_train2, y_train2)
                current_score = gbc.score(X_test2, y_test2)   # mean accuracy on the held-out fold
                if current_score > top_score:                 # keep the best model, not the worst
                    top_score = current_score
                    top_classifier = gbc
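
# Sketch, not in the original paste: the commented-out GridSearchCV import above suggests the
# same search can be written declaratively. Assumes the same X_train, y_train, kf and the
# GradientBoostingClassifier import; newer scikit-learn versions rename loss="deviance" to "log_loss".
from sklearn.model_selection import GridSearchCV

param_grid = {
    "n_estimators": [1, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100],
    "max_depth": [1, 3, 10],
    "loss": ["deviance", "exponential"],
}
grid = GridSearchCV(GradientBoostingClassifier(), param_grid, cv=kf)
grid.fit(X_train, y_train)     # scores each parameter combination as the mean over the 5 folds
top_score, top_classifier = grid.best_score_, grid.best_estimator_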

from sklearn.ensemble import AdaBoostClassifier

kf = StratifiedKFold(n_splits=5, shuffle=True, random_state=RND_SEED)
top_score = 0            # start below any possible accuracy
top_classifier = None

for train_index, test_index in kf.split(X_train, y_train):
    for n_estimators in [1, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100]:
        adb = AdaBoostClassifier(n_estimators=n_estimators)
        X_train2, X_test2 = X_train.values[train_index], X_train.values[test_index]
        y_train2, y_test2 = y_train.values[train_index], y_train.values[test_index]
        adb.fit(X_train2, y_train2)
        current_score = adb.score(X_test2, y_test2)   # accuracy on the held-out fold
        if current_score > top_score:                 # keep the best AdaBoost model
            top_score = current_score
            top_classifier = adb

#Ada
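
# Sketch, not in the original paste: selecting n_estimators by the average score across all 5
# folds (cross_val_score) is more robust than keeping whichever fit happened to score highest
# on a single fold. Assumes the same kf, X_train, y_train as above.
from sklearn.model_selection import cross_val_score

best_mean, best_n = 0.0, None
for n_estimators in [1, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100]:
    scores = cross_val_score(AdaBoostClassifier(n_estimators=n_estimators),
                             X_train, y_train, cv=kf)   # one accuracy per fold
    if scores.mean() > best_mean:
        best_mean, best_n = scores.mean(), n_estimators
top_classifier = AdaBoostClassifier(n_estimators=best_n).fit(X_train, y_train)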