Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
% Load the face / non-face image stacks, convert to double for arithmetic,
% and preview 25 samples of each class (every 10th image) in two figures.
load faces;
load nonfaces;
faces = double(faces);
nonfaces = double(nonfaces);

figure(1);
colormap gray;
for idx = 1:25
    subplot(5, 5, idx);
    imagesc(faces(:, :, 10*idx));
    axis image off;
end

figure(2);
colormap gray;
for idx = 1:25
    subplot(5, 5, idx);
    imagesc(nonfaces(:, :, 10*idx));
    axis image off;
end
% Generate a bank of random Haar feature masks and display the first 25.
nbrHaarFeatures = 100;
haarFeatureMasks = GenerateHaarFeatureMasks(nbrHaarFeatures);

figure(3);
colormap gray;
for m = 1:25
    subplot(5, 5, m);
    % Fixed color scale [-1 2] so mask weights are comparable across plots.
    imagesc(haarFeatureMasks(:, :, m), [-1 2]);
    axis image off;
end
% Build the training set from the first nbrTrainExamples images of each
% class. Faces carry label y = +1, non-faces y = -1, matching the order
% in which the images are concatenated below.
nbrTrainExamples = 30;
trainImages = cat(3, faces(:,:,1:nbrTrainExamples), ...
                     nonfaces(:,:,1:nbrTrainExamples));
xTrain = ExtractHaarFeatures(trainImages, haarFeatureMasks);
yTrain = [ones(1,nbrTrainExamples), -ones(1,nbrTrainExamples)];
%% Implement the AdaBoost training here
% Use your implementation of WeakClassifier and WeakClassifierError.
%
% Trains nClf decision stumps by exhaustive search over (feature,
% threshold) pairs. Each trained stump is stored as one column of clfs:
%   clfs(:, n) = [threshold; polarity; featureIndex; alpha]
nbrExamples = size(xTrain, 2);           % total training examples (both classes)
d = ones(1, nbrExamples) / nbrExamples;  % uniform initial example weights
nClf = 300;                              % number of boosting rounds / weak classifiers
clfs = zeros(4, nClf);                   % zeros, not ones: columns are fully overwritten

% TRAIN MODEL
for n = 1:nClf
    % Best stump found so far in this boosting round.
    alpha        = 0;
    errMin       = 1;   % weighted error lies in [0, 1], so 1 is a safe upper bound
    polarity     = 0;
    threshold    = 0;
    featureIndex = 0;
    h_min        = 0;
    % Exhaustive search: every feature, with every observed feature value
    % tried as the threshold.
    for haarFt = 1:nbrHaarFeatures
        for trainEx = 1:nbrExamples
            tao = xTrain(haarFt, trainEx);
            p = 1;
            h = WeakClassifier(tao, p, xTrain(haarFt,:));
            err = WeakClassifierError(h, d, yTrain);
            % A stump worse than chance becomes better than chance with
            % flipped polarity; flipping p simply negates the predictions,
            % which is why h_min = p*h below is the flipped output.
            if err > 0.5
                p = -1;
                err = 1 - err;
            end
            if err < errMin
                polarity     = p;
                threshold    = tao;
                featureIndex = haarFt;
                errMin       = err;
                % Alpha formula taken from lecture slides. Guard against
                % errMin == 0, which would make alpha blow up to Inf.
                alpha = 0.5*log((1 - errMin) / max(errMin, eps));
                h_min = p*h;   % predictions of the best stump found so far
            end
        end
    end
    clfs(1:4, n) = [threshold, polarity, featureIndex, alpha];
    % Re-weight: raise the weight of misclassified examples, then cap and
    % renormalize so a single outlier cannot dominate the distribution.
    d = d.*exp(-alpha * yTrain .* h_min);
    d(d>0.5) = 0.5;
    d = d ./ sum(d);
end
%% Extract test data
% Hold-out set: the nbrTestExamples images that follow the training
% images in each class, labelled with the same +1 / -1 convention.
nbrTestExamples = 4000;
first = nbrTrainExamples + 1;
last  = nbrTrainExamples + nbrTestExamples;
testImages = cat(3, faces(:,:,first:last), nonfaces(:,:,first:last));
xTest = ExtractHaarFeatures(testImages, haarFeatureMasks);
yTest = [ones(1,nbrTestExamples), -ones(1,nbrTestExamples)];
%% Evaluate your strong classifier here
% You can evaluate on the training data if you want, but you CANNOT use
% this as a performance metric since it is biased. You MUST use the test
% data to truly evaluate the strong classifier.
% TEST MODEL
nbrTestExamples = 4000;
% Fix: the range must start ONE PAST the last training image. The original
% used f = nbrTrainExamples, which (a) reused a training image in the test
% set and (b) produced nbrTestExamples+1 images per class, so the images
% no longer lined up with the nbrTestExamples labels per class in yTest.
f = nbrTrainExamples + 1;
t = nbrTrainExamples + nbrTestExamples;
testImages = cat(3, faces(:,:,f:t), nonfaces(:,:,f:t));
xTest = ExtractHaarFeatures(testImages, haarFeatureMasks);
yTest = [ones(1,nbrTestExamples), -ones(1,nbrTestExamples)];
% Apply the strong classifier H(x) = sign(sum_c alpha_c * h_c(x)) to every
% test example.
% Fix: xTest/yTest hold BOTH classes, i.e. 2*nbrTestExamples columns; the
% original loop ran only 1:nbrTestExamples and therefore scored just the
% face half of the test set.
nbrTotalTest = 2 * nbrTestExamples;
misclassified = [];
correct = 0;
for testEx = 1:nbrTotalTest
    s = 0;  % weighted vote of all weak classifiers for this example
    for c = 1:nClf
        threshold    = clfs(1, c);
        polarity     = clfs(2, c);
        featureIndex = clfs(3, c);
        alpha        = clfs(4, c);
        x = xTest(featureIndex, testEx);
        % Decision stump: h = +1 unless the polarity-adjusted feature
        % value falls below the polarity-adjusted threshold.
        h = 1;
        if polarity*x < polarity*threshold
            h = -1;
        end
        s = s + alpha*h;
    end
    y = yTest(testEx);
    if sign(s) == y
        correct = correct + 1;
    else
        misclassified = [misclassified, testEx]; %#ok<AGROW>
    end
end
accuracy = correct / nbrTotalTest;
accuracy
numel(misclassified)
%% Plot the error of the strong classifier as function of the number of weak classifiers.
% Note: you can find this error without re-training with a different
% number of weak classifiers.
% NOTE(review): clf_range and accuracies are never assigned anywhere in
% this script -- they must be computed first (e.g. by accumulating the
% staged score over c = 1:nClf in the test loop) before this will run.
figure
plot(clf_range, accuracies);
% Fix: labels must come AFTER plot. A high-level plot() call resets the
% axes, which erased the title/labels the original set beforehand.
title('Training error with different number of weak classifiers.')
xlabel('Number of weak classifiers')
ylabel('Training Error')
%for i = 1:size(nClf,1)
%    size(WeakClassifier(ts(i), ps(i), xTest(fs(i),:)))
%    size(as(i))
%    cs(i,:) = as(i) * WeakClassifier(ts(i), ps(i), xTest(fs(i),:));
%end
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement