Advertisement
Not a member of Pastebin yet? Sign up — it unlocks many cool features!
import lightgbm as lgb

# Train a LightGBM random-forest multiclass model and report train/test accuracy.
# Assumes X_train/y_train, X_test/y_test, X/y, s, np, and accuracy_score are
# defined earlier in the file (not visible in this chunk).

# Seed numpy's RNG for reproducibility. NOTE(review): LightGBM's own
# randomness (bagging/feature sampling) is controlled by its *_seed
# parameters, which are left at their defaults here.
np.random.seed(s)

d_train = lgb.Dataset(X_train, label=y_train)
# Full dataset, presumably for a later full-data refit — unused in this chunk.
d_submission = lgb.Dataset(X, label=y)

params = {
    # 'rf' = random forest mode: trees are bagged and averaged, not boosted.
    # NOTE(review): the original also set learning_rate=30, which is not
    # applied in 'rf' mode (no shrinkage), so it is dropped here.
    'boosting_type': 'rf',
    'objective': 'multiclass',
    'metric': 'multi_logloss',
    'num_class': 20,
    'num_leaves': 350,       # default 31
    'max_depth': -1,         # -1 = unlimited depth (default)
    'min_data_in_leaf': 0,   # default 20; 0 disables the minimum-size constraint
    # 'rf' mode requires bagging to be enabled:
    'bagging_fraction': 0.9,
    'bagging_freq': 10,
    # The original set both 'sub_feature' (=1) and its canonical name
    # 'feature_fraction' (=0.9); LightGBM resolves the alias so only the
    # canonical parameter takes effect. Keep the canonical name only.
    'feature_fraction': 0.9,
    # 'lambda_l2': 0.4,      # L2 regularization, currently disabled
}

# 100 trees (in 'rf' mode each iteration adds one bagged tree).
clf = lgb.train(params, d_train, 100)

# predict() returns per-row class-probability vectors; take the argmax
# over the class axis to get hard labels.
y_pred = np.argmax(clf.predict(X_train), axis=1)
print(accuracy_score(y_train, y_pred))

y_pred = np.argmax(clf.predict(X_test), axis=1)
print(accuracy_score(y_test, y_pred))
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement