Advertisement
Not a member of Pastebin yet?
Sign up —
it unlocks many cool features!
- #!/usr/bin/env python
- import numpy as np
- import matplotlib.pyplot as plt
- import matplotlib as mpl
- import time
- import itertools
- import pickle
- import IPython
- #from sklearn.mixture import gmm
- #from sklearn import mixture
- from sklearn import svm, datasets
- from sklearn.externals.six.moves import xrange
- from sklearn.mixture import GMM
- from scipy import linalg
- from mpl_toolkits.mplot3d import Axes3D
- from colorsys import rgb_to_hsv
if __name__ == "__main__":
    # Load precomputed Hue/Saturation feature vectors (HS) and their class
    # labels (y). NOTE(review): pickle.load is unsafe on untrusted input;
    # these are assumed to be locally generated data files.
    with open("HS.p", "rb") as f:
        HS = pickle.load(f)
    with open("yHS.p", "rb") as f:
        y = pickle.load(f)

    # Keep the full data set for later evaluation before subsetting.
    X_test = HS
    y_test = y

    def _subset(seq):
        """Take 10 samples from each of the 4 class-contiguous regions."""
        # BUGFIX: original used seq[0:9] (9 elements) — off by one relative
        # to the other 10-element slices and num_images_array below.
        return seq[0:10] + seq[118:128] + seq[245:255] + seq[376:386]

    X = np.array(_subset(HS))
    y = np.array(_subset(y))

    n_classes = len(np.unique(y))
    num_images_array = [10, 10, 10, 10]  # samples per class in the subset

    h = 0.02  # step size in the mesh grid

    # Fit a linear SVM. Data is deliberately left unscaled so the plot stays
    # in the original H/S coordinate space.
    C = 0.1  # SVM regularization parameter
    lin_svc = svm.LinearSVC(C=C).fit(X, y)

    # Build a mesh covering the training data, padded by 1 on each side.
    x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1
    y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1
    xx, yy = np.meshgrid(np.arange(x_min, x_max, h),
                         np.arange(y_min, y_max, h))

    # Titles for the plots.
    titles = ['LinearSVC (linear kernel)']
    # BUGFIX: original wrote (lin_svc) — parentheses alone do not make a
    # tuple; the trailing comma does.
    classifiers = (lin_svc,)
    n_classifiers = len(classifiers)

    # Plot the decision boundary: classify every point of the mesh
    # [x_min, x_max] x [y_min, y_max] and draw filled contours.
    clf = lin_svc
    Z = clf.predict(np.c_[xx.ravel(), yy.ravel()])
    Z = Z.reshape(xx.shape)
    plt.contourf(xx, yy, Z, cmap=plt.cm.Paired, alpha=0.8)

    # Overlay the training points, colored by class.
    plt.scatter(X[:, 0], X[:, 1], c=y, cmap=plt.cm.Paired)
    plt.xlabel('H')
    plt.ylabel('S')
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())
    plt.xticks(())
    plt.yticks(())
    # BUGFIX: original used titles[i], but i is undefined (the enumerate
    # loop it belonged to is commented out) — NameError at runtime.
    plt.title(titles[0])
    plt.show()
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement