Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
#!/usr/bin/env python
"""Recover the intercept (constant) term of a fitted SVC by hand from its
dual coefficients and support vectors, and compare against scikit-learn's
``clf.intercept_``. First section: linear kernel."""
from sklearn.svm import SVC
import numpy as np

# Toy, linearly separable 2-D problem: label is the sign of x1 + x2 - 1.
X = np.random.random(size=(1000, 2))
Y = [1.0 if x1 + x2 > 1 else -1.0 for x1, x2 in X]
C = 10e9  # effectively a hard margin; NOTE: 10e9 == 1e10, not 1e9

# Linear kernel: b = mean over "free" support vectors of (y_m - w . x_m).
# Support vectors whose |alpha_m * y_m| reaches the box bound C sit inside
# the margin, so they carry no intercept information and are skipped.
clf = SVC(C=C, kernel='linear', verbose=True)
clf.fit(X, Y)
nSV = sum(clf.n_support_)
b, soft = 0.0, 0.0
for m in range(nSV):
    # clf.dual_coef_[0][m] is alpha_m * y_m for support vector m.
    if np.abs(clf.dual_coef_[0][m]) < C:
        # clf.coef_[0] is the weight vector w (shape (2,)), so the dot
        # product is a scalar and b stays a plain float rather than a
        # 1-element array (which newer NumPy refuses to format with %f).
        b += Y[clf.support_[m]] - np.dot(X[clf.support_[m]], clf.coef_[0])
    else:
        soft += 1
b /= (nSV - soft)
# print() call form runs on both Python 2 and 3 (the original used the
# Python-2-only print statement); intercept_[0] extracts the scalar.
print("b = %f, clf.intercept_ = %f" % (b, clf.intercept_[0]))
# Second section: polynomial kernel. Here w lives in feature space, so the
# intercept is recovered as
#     b = mean over free SVs of  y_m - sum_i (alpha_i * y_i) * K(x_i, x_m),
# where clf.dual_coef_ already stores alpha_i * y_i.
coef0, Q = 1.0, 3.0


def Kpoly(u, v):
    """Polynomial kernel (u . v + coef0) ** Q with implicit gamma = 1."""
    return (np.dot(u, v) + coef0) ** Q


# BUGFIX: sklearn's 'poly' kernel is (gamma * <u, v> + coef0) ** degree and
# the default gamma is NOT 1 (it is 1/n_features or 'scale' depending on
# version). Pass gamma=1.0 explicitly so the hand-rolled Kpoly above matches
# the kernel the model was actually fitted with; otherwise b never agrees
# with clf.intercept_.
clf = SVC(C=C, kernel='poly', gamma=1.0, coef0=coef0, degree=Q, verbose=True)
clf.fit(X, Y)
nSV = sum(clf.n_support_)
b, soft = 0.0, 0.0
for m in range(nSV):
    # Skip bound support vectors (|alpha_m * y_m| == C), as in the linear case.
    if np.abs(clf.dual_coef_[0][m]) < C:
        x_m = X[clf.support_[m]]
        # Kernel evaluations of x_m against every support vector.
        k = [Kpoly(sv, x_m) for sv in clf.support_vectors_]
        # dual_coef_[0] (shape (nSV,)) makes the dot product a scalar float.
        b += Y[clf.support_[m]] - np.dot(clf.dual_coef_[0], k)
    else:
        soft += 1
b /= (nSV - soft)
# print() call form runs on both Python 2 and 3; intercept_[0] is the scalar.
print("b = %f, clf.intercept_ = %f" % (b, clf.intercept_[0]))
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement