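# --- Hypothetical setup (not in the original paste) ---
# The paste uses X_train, y_train, X_test, y_test, X, and y without defining
# them. This block is a placeholder sketch so the script runs end to end;
# make_classification stands in for whatever 20-class data the original used,
# and all of its arguments here are assumed values.
import numpy as np
from sklearn.datasets import make_classification
from sklearn.model_selection import train_test_split

X, y = make_classification(n_samples=5000, n_features=40, n_informative=25,
                           n_classes=20, random_state=42)
X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.2, random_state=42)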
import lightgbm as lgb
import numpy as np
from sklearn.metrics import accuracy_score

s = 42  # the original left s undefined; this value is an arbitrary placeholder
np.random.seed(s)

d_train = lgb.Dataset(X_train, label=y_train)
d_submission = lgb.Dataset(X, label=y)  # built but never used below

params = {}
params['learning_rate'] = 30  # 'rf' mode averages trees without shrinkage, so this has little effect
params['boosting_type'] = 'rf'
params['objective'] = 'multiclass'
params['metric'] = 'multi_logloss'
params['sub_feature'] = 1  # alias of feature_fraction (default 1); superseded below
params['num_leaves'] = 350  # default 31
params['max_depth'] = -1  # -1 = no depth limit (default)
params['num_class'] = 20
params['min_data_in_leaf'] = 0  # default 20
# params['lambda_l2'] = 0.4

# 'rf' boosting requires bagging to be enabled (bagging_freq > 0, bagging_fraction < 1):
params['bagging_fraction'] = 0.9
params['bagging_freq'] = 10
params['feature_fraction'] = 0.9  # main parameter name; takes precedence over the sub_feature alias

clf = lgb.train(params, d_train, num_boost_round=100)

# predict() returns per-class probabilities for multiclass, so take the argmax per row.
y_pred = np.argmax(clf.predict(X_train), axis=1)
print(accuracy_score(y_train, y_pred))  # training accuracy

y_pred = np.argmax(clf.predict(X_test), axis=1)
print(accuracy_score(y_test, y_pred))  # test accuracy