Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- import itertools
- import os
- import scipy.io as sio
- import numpy as np
- import matplotlib.pyplot as plt
- from sklearn import svm
- from sklearn.metrics import confusion_matrix
- from plot_confusion_matrix import plot_confusion_matrix
rootdir = 'data'

samples = []   # raw skeleton arrays, one per .mat file
y = []         # action labels parsed from the file name
X = []         # feature vectors (filled in later)
isTrain = []   # 1 = training sample (odd subject id), 0 = test sample

# --- read data
# File names are expected to look like "aXX_sYY_....mat": the action id
# follows the leading 'a', the subject id follows the leading 's'.
for subdir, dirs, files in os.walk(rootdir):
    for file in files:
        # BUG FIX: join against the directory currently being walked, not
        # rootdir -- os.walk recurses, so files may live in subfolders and
        # "rootdir + '/' + file" would then point at a non-existent path.
        data = sio.loadmat(os.path.join(subdir, file))['d_skel']
        samples.append(data)
        action = int(file.split('_')[0][1:])
        y.append(action)
        subject = int(file.split('_')[1][1:])
        # Odd-numbered subjects form the training split.
        isTrain.append(subject % 2)
def for_dimension(vector):
    """Summary statistics for one coordinate track of a joint.

    Parameters
    ----------
    vector : 1-D array-like
        Time series of a single coordinate (x, y or z) over the frames.

    Returns
    -------
    list
        ``[mean, rms, vel, acc]`` where ``vel`` is the sum of absolute
        first differences and ``acc`` the sum of absolute second
        differences (both 0.0 for tracks too short to differentiate).
    """
    vector = np.asarray(vector)
    mean = np.mean(vector)
    rms = np.sqrt(np.mean(vector ** 2))
    # np.sum/np.abs instead of the builtins: one C-level pass over the
    # array, and a well-defined 0.0 for empty difference arrays.
    vel = np.sum(np.abs(np.diff(vector)))
    acc = np.sum(np.abs(np.diff(vector, n=2)))
    return [mean, rms, vel, acc]
# --- compute features
# Per joint and per coordinate we summarise two (overlapping) temporal
# windows: frames [0, 40) and frames [30, end).
for sample in samples:
    per_joint = []
    for joint in range(20):  # 20 joints per skeleton
        stats = []
        for dim in range(3):          # x, y, z -- first window
            stats += for_dimension(sample[joint, dim, :40])
        for dim in range(3):          # x, y, z -- second window
            stats += for_dimension(sample[joint, dim, 30:])
        per_joint.append(stats)
    # FIX: the original relied on "list /= np.float64" silently coercing
    # the list to an ndarray; make the conversion explicit.
    features = np.asarray(per_joint).ravel()
    # L1-normalise the feature vector
    # (assumes the sum is non-zero -- TODO confirm with real data).
    features = features / features.sum()
    X.append(features)
# --- plot: bar chart of every 16th feature vector in a 3x3 grid
n_rows, n_cols = 3, 3
for panel in range(n_rows * n_cols):
    plt.subplot(n_rows, n_cols, panel + 1)
    plt.bar(np.arange(480), X[panel * 16])
plt.show()
# --- convert to arrays and split into train / test partitions
isTrain = np.asarray(isTrain)
X = np.asarray(X)
y = np.asarray(y)

# isTrain holds only 0s and 1s, so the complement of the train mask
# is exactly the test mask.
train_mask = isTrain == 1
X_train, X_test = X[train_mask], X[~train_mask]
y_train, y_test = y[train_mask], y[~train_mask]
# --- classify
classifier = svm.SVC(gamma='scale')
classifier.fit(X_train, y_train)
results = classifier.predict(X_test)
# print(results)

# Accuracy: fraction of test samples whose prediction matches the truth.
# A non-zero (results - y_test) entry marks a misclassification.
total = len(y_test)
incorrect = len(np.nonzero(results - y_test)[0])
correct = total - incorrect
accuracy = correct / total
print(accuracy)
# --- confusion matrix
cm = confusion_matrix(y_test, results)
# print(cm)

# NOTE(review): actions parsed from file names look 1-based ("aXX"), yet
# the class ids here start at 0 -- confirm against plot_confusion_matrix.
classes = np.arange(0, 27)
plot_confusion_matrix(y_test, results, classes=classes,
                      normalize=True, title='Confusion matrix')
plt.show()
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement