###############################################################################
# 5. Hyper-parameters #
###############################################################################
# Imports used in this section (if not already in scope from earlier sections)
import numpy as np
from sklearn.tree import DecisionTreeClassifier

# Initiate parameter grid
parameters = {}

# Update dict with LDA
parameters.update({"LDA": {
    "classifier__solver": ["svd"],
}})

# Update dict with QDA
parameters.update({"QDA": {
    "classifier__reg_param": [0.01 * ii for ii in range(0, 101)],
}})

# Update dict with AdaBoost
parameters.update({"AdaBoost": {
    "classifier__base_estimator": [DecisionTreeClassifier(max_depth=ii) for ii in range(1, 6)],
    "classifier__n_estimators": [200],
    "classifier__learning_rate": [0.001, 0.01, 0.05, 0.1, 0.25, 0.50, 0.75, 1.0]
}})

# Update dict with Bagging
parameters.update({"Bagging": {
    "classifier__base_estimator": [DecisionTreeClassifier(max_depth=ii) for ii in range(1, 6)],
    "classifier__n_estimators": [200],
    "classifier__max_features": [0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0],
    "classifier__n_jobs": [-1]
}})

# Update dict with Gradient Boosting
parameters.update({"Gradient Boosting": {
    "classifier__learning_rate": [0.15, 0.1, 0.05, 0.01, 0.005, 0.001],
    "classifier__n_estimators": [200],
    "classifier__max_depth": [2, 3, 4, 5, 6],
    "classifier__min_samples_split": [0.005, 0.01, 0.05, 0.10],
    "classifier__min_samples_leaf": [0.005, 0.01, 0.05, 0.10],
    "classifier__max_features": ["auto", "sqrt", "log2"],
    "classifier__subsample": [0.8, 0.9, 1]
}})

# Update dict with Extra Trees
parameters.update({"Extra Trees Ensemble": {
    "classifier__n_estimators": [200],
    "classifier__class_weight": [None, "balanced"],
    "classifier__max_features": ["auto", "sqrt", "log2"],
    "classifier__max_depth": [3, 4, 5, 6, 7, 8],
    "classifier__min_samples_split": [0.005, 0.01, 0.05, 0.10],
    "classifier__min_samples_leaf": [0.005, 0.01, 0.05, 0.10],
    "classifier__criterion": ["gini", "entropy"],
    "classifier__n_jobs": [-1]
}})

# Update dict with Random Forest
parameters.update({"Random Forest": {
    "classifier__n_estimators": [200],
    "classifier__class_weight": [None, "balanced"],
    "classifier__max_features": ["auto", "sqrt", "log2"],
    "classifier__max_depth": [3, 4, 5, 6, 7, 8],
    "classifier__min_samples_split": [0.005, 0.01, 0.05, 0.10],
    "classifier__min_samples_leaf": [0.005, 0.01, 0.05, 0.10],
    "classifier__criterion": ["gini", "entropy"],
    "classifier__n_jobs": [-1]
}})

# Update dict with Ridge
parameters.update({"Ridge": {
    "classifier__alpha": [1e-7, 1e-6, 1e-5, 1e-4, 1e-3, 1e-2, 1e-1, 0.25, 0.50, 0.75, 1.0]
}})

# Update dict with SGD Classifier
parameters.update({"SGD": {
    "classifier__alpha": [1e-7, 1e-6, 1e-5, 1e-4, 1e-3, 1e-2, 1e-1, 0.25, 0.50, 0.75, 1.0],
    "classifier__penalty": ["l1", "l2"],
    "classifier__n_jobs": [-1]
}})

# Update dict with BernoulliNB Classifier
parameters.update({"BNB": {
    "classifier__alpha": [1e-7, 1e-6, 1e-5, 1e-4, 1e-3, 1e-2, 1e-1, 0.25, 0.50, 0.75, 1.0]
}})

# Update dict with GaussianNB Classifier
parameters.update({"GNB": {
    "classifier__var_smoothing": [1e-9, 1e-8, 1e-7, 1e-6, 1e-5]
}})

# Update dict with K Nearest Neighbors Classifier
parameters.update({"KNN": {
    "classifier__n_neighbors": list(range(1, 31)),
    "classifier__p": [1, 2, 3, 4, 5],
    "classifier__leaf_size": [5, 10, 15, 20, 25, 30, 35, 40, 45, 50],
    "classifier__n_jobs": [-1]
}})

# Update dict with MLPClassifier
parameters.update({"MLP": {
    "classifier__hidden_layer_sizes": [(5,), (10,), (5, 5), (10, 10), (5, 5, 5), (10, 10, 10)],
    "classifier__activation": ["identity", "logistic", "tanh", "relu"],
    "classifier__learning_rate": ["constant", "invscaling", "adaptive"],
    "classifier__max_iter": [100, 200, 300, 500, 1000, 2000],
    "classifier__alpha": list(10.0 ** -np.arange(1, 10)),
}})

# Update dict with Linear SVC
parameters.update({"LSVC": {
    "classifier__penalty": ["l2"],
    "classifier__C": [0.0001, 0.001, 0.01, 0.1, 1.0, 10, 100]
}})

# Update dict with Nu SVC
parameters.update({"NuSVC": {
    "classifier__nu": [0.25, 0.50, 0.75],
    "classifier__kernel": ["linear", "rbf", "poly"],
    "classifier__degree": [1, 2, 3, 4, 5, 6],
}})

# Update dict with SVC
parameters.update({"SVC": {
    "classifier__kernel": ["linear", "rbf", "poly"],
    "classifier__gamma": ["auto"],
    "classifier__C": [0.1, 0.5, 1, 5, 10, 50, 100],
    "classifier__degree": [1, 2, 3, 4, 5, 6]
}})

# Update dict with Decision Tree Classifier
parameters.update({"DTC": {
    "classifier__criterion": ["gini", "entropy"],
    "classifier__splitter": ["best", "random"],
    "classifier__class_weight": [None, "balanced"],
    "classifier__max_features": ["auto", "sqrt", "log2"],
    "classifier__max_depth": [1, 2, 3, 4, 5, 6, 7, 8],
    "classifier__min_samples_split": [0.005, 0.01, 0.05, 0.10],
    "classifier__min_samples_leaf": [0.005, 0.01, 0.05, 0.10],
}})

# Update dict with Extra Tree Classifier
parameters.update({"ETC": {
    "classifier__criterion": ["gini", "entropy"],
    "classifier__splitter": ["best", "random"],
    "classifier__class_weight": [None, "balanced"],
    "classifier__max_features": ["auto", "sqrt", "log2"],
    "classifier__max_depth": [1, 2, 3, 4, 5, 6, 7, 8],
    "classifier__min_samples_split": [0.005, 0.01, 0.05, 0.10],
    "classifier__min_samples_leaf": [0.005, 0.01, 0.05, 0.10],
}})
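
###############################################################################
# Usage sketch: how a grid like this is typically consumed #
###############################################################################
# Minimal sketch, assuming each estimator is the final step of a sklearn
# Pipeline named "classifier" (which the "classifier__" prefixes above imply).
# The `classifiers` dict and the synthetic X_train / y_train below are
# placeholders for illustration; substitute the estimators and data used
# elsewhere in the full script.
from sklearn.datasets import make_classification
from sklearn.linear_model import RidgeClassifier
from sklearn.model_selection import GridSearchCV
from sklearn.neighbors import KNeighborsClassifier
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import StandardScaler

# Placeholder estimators keyed to match the parameter grids above
classifiers = {
    "Ridge": RidgeClassifier(),
    "KNN": KNeighborsClassifier(),
}

# Placeholder training data
X_train, y_train = make_classification(n_samples=500, n_features=20, random_state=42)

results = {}
for name, estimator in classifiers.items():
    # The step name "classifier" must match the "classifier__" prefix in the grids
    pipeline = Pipeline([("scaler", StandardScaler()),
                         ("classifier", estimator)])
    search = GridSearchCV(pipeline,
                          param_grid=parameters[name],
                          cv=5,
                          n_jobs=-1,
                          scoring="accuracy")
    search.fit(X_train, y_train)
    results[name] = {"best_params": search.best_params_,
                     "best_score": search.best_score_}
    print(name, search.best_score_, search.best_params_)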