Advertisement
Not a member of Pastebin yet? Sign up — it unlocks many cool features!
def output_loo(X, y, n_features=range(5, 31, 5)):
    """Leave-one-out accuracy of six linear classifiers vs. feature count.

    For each value in *n_features*, the top-k features are selected by chi2
    score (after min-max scaling, which keeps all values non-negative as chi2
    requires), and six pipelines (L1/L2/default LinearSVC and
    LogisticRegression) are scored with leave-one-out cross-validation.

    Parameters
    ----------
    X, y : array-likes accepted by scikit-learn estimators.
    n_features : iterable of int
        Feature counts to evaluate (default: 5, 10, ..., 30).

    Returns
    -------
    pandas.DataFrame
        Mean LOO accuracies rounded to 3 decimals; one row per feature
        count (index is the count as a string), one column per classifier.
    """
    # One (column label, estimator) pair per classifier.  A single table
    # drives both the scoring loop and the result's column names, so they
    # can never drift out of sync.
    classifiers = [
        ('SVM with L1', svm.LinearSVC(C=0.5, loss='l2', penalty='l1', dual=False)),
        ('SVM with L2', svm.LinearSVC(C=0.5, loss='l2', penalty='l2', dual=False)),
        (' SVM ', svm.LinearSVC()),
        ('LR with L1', LogisticRegression(C=1, penalty='l1', tol=0.4)),
        ('LR with L2', LogisticRegression(C=1, penalty='l2', tol=0.4)),
        (' LR ', LogisticRegression()),
    ]
    scaling = MinMaxScaler()  # chi2 requires non-negative inputs
    # NOTE(review): legacy scikit-learn (<0.18) API — LeaveOneOut takes the
    # sample count and cross_val_score lives in skl.cross_validation.
    cv = LeaveOneOut(len(y))
    all_loo = []
    nf = []
    for num in n_features:
        selector = SelectKBest(chi2, k=num)
        loo = [
            skl.cross_validation.cross_val_score(
                Pipeline([('scaling', scaling),
                          ('chi2', selector),
                          ('clf', clf)]),
                X, y, cv=cv, scoring="accuracy").mean()
            for _, clf in classifiers
        ]
        all_loo.append(loo)
        nf.append(str(num))
    res = np.round(np.array(all_loo), 3)
    colnames = [label for label, _ in classifiers]
    return pandas.DataFrame(res, index=nf, columns=colnames)
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement