% Load face and non-face data and plot a few examples
load faces;
load nonfaces;
faces = double(faces);
nonfaces = double(nonfaces);

figure(1);
colormap gray;
for k = 1:25
    subplot(5,5,k), imagesc(faces(:,:,10*k));
    axis image;
    axis off;
end

figure(2);
colormap gray;
for k = 1:25
    subplot(5,5,k), imagesc(nonfaces(:,:,10*k));
    axis image;
    axis off;
end

% Generate Haar feature masks
nbrHaarFeatures = 100;
haarFeatureMasks = GenerateHaarFeatureMasks(nbrHaarFeatures);

figure(3);
colormap gray;
for k = 1:25
    subplot(5,5,k), imagesc(haarFeatureMasks(:,:,k),[-1 2]);
    axis image;
    axis off;
end


% Create a training data set with a number of training data examples
% from each class. Non-faces = class label y=-1, faces = class label y=1
nbrTrainExamples = 30;
trainImages = cat(3,faces(:,:,1:nbrTrainExamples),nonfaces(:,:,1:nbrTrainExamples));
xTrain = ExtractHaarFeatures(trainImages,haarFeatureMasks);
yTrain = [ones(1,nbrTrainExamples), -ones(1,nbrTrainExamples)];

%% Implement the AdaBoost training here
% Use your implementation of WeakClassifier and WeakClassifierError
% (minimal sketches of both are included at the end of this script).
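% AdaBoost outline (descriptive note added for clarity): start from uniform
% example weights d. In each round, exhaustively search all (feature,
% threshold) pairs for the decision stump with the lowest weighted error,
% flipping the polarity whenever the error exceeds 0.5. Store the stump and
% its vote weight alpha = 0.5*log((1-err)/err), then re-weight the examples
% so that misclassified ones get more influence in the next round.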

nbrExamples = size(xTrain, 2);          % number of training examples (both classes)
d = ones(1, nbrExamples) / nbrExamples; % uniform initial example weights
nClf = 300;                             % number of weak classifiers to train
clfs = zeros(4, nClf);                  % columns: [threshold; polarity; featureIndex; alpha]

% TRAIN MODEL
for n = 1:nClf
    % Best stump found so far in this round
    alpha = 0;
    errMin = 1;
    polarity = 0;
    threshold = 0;
    featureIndex = 0;
    h_min = 0;
    for haarFt = 1:nbrHaarFeatures
        for trainEx = 1:nbrTrainExamples*2

            % Try each training example's feature value as the threshold
            tau = xTrain(haarFt, trainEx);
            p = 1;
            h = WeakClassifier(tau, p, xTrain(haarFt,:));
            err = WeakClassifierError(h, d, yTrain);

            % A stump worse than chance becomes better than chance when its
            % polarity is flipped (h itself is flipped below via p*h)
            if err > 0.5
                p = -1;
                err = 1 - err;
            end

            if err < errMin
                polarity = p;
                threshold = tau;
                featureIndex = haarFt;
                errMin = err;
                % Alpha formula taken from the lecture slides:
                % alpha = 0.5 * ln((1 - err) / err)
                alpha = 0.5*log((1 - errMin) / errMin);
                h_min = p*h;  % predictions of the (possibly flipped) stump
            end
        end
    end
    % Store the best stump of this round: [threshold; polarity; featureIndex; alpha]
    clfs(1:4, n) = [threshold, polarity, featureIndex, alpha];
    % Update the weights: misclassified examples gain influence
    d = d.*exp(-alpha * yTrain .* h_min);
    % Cap individual weights so a single outlier cannot dominate,
    % then renormalize
    d(d>0.5) = 0.5;
    d = d ./ sum(d);
end

%% Extract test data

nbrTestExamples = 4000;

testImages = cat(3,faces(:,:,(nbrTrainExamples+1):(nbrTrainExamples+nbrTestExamples)),...
    nonfaces(:,:,(nbrTrainExamples+1):(nbrTrainExamples+nbrTestExamples)));
xTest = ExtractHaarFeatures(testImages,haarFeatureMasks);
yTest = [ones(1,nbrTestExamples), -ones(1,nbrTestExamples)];

%% Evaluate your strong classifier here
% You can evaluate on the training data if you want, but you CANNOT use
% this as a performance metric since it is biased. You MUST use the test
% data to truly evaluate the strong classifier.


% TEST MODEL
% xTest and yTest were already extracted above; the strong classifier is
% H(x) = sign(sum_c alpha_c * h_c(x)), evaluated example by example below.

misclassified = [];
correct = 0;
for testEx = 1:2*nbrTestExamples   % loop over BOTH classes (faces + non-faces)
    s = 0;
    for c = 1:nClf
        threshold = clfs(1, c);
        polarity = clfs(2, c);
        featureIndex = clfs(3, c);
        alpha = clfs(4, c);
        x = xTest(featureIndex, testEx);
        h = 1;
        if polarity*x < polarity*threshold
            h = -1;
        end
        s = s + alpha*h;   % weighted vote of weak classifier c
    end
    y = yTest(testEx);
    if sign(s) == y
        correct = correct + 1;
    else
        misclassified = [misclassified, testEx]; %#ok<AGROW>
    end
end

accuracy = correct / (2*nbrTestExamples);

accuracy
numel(misclassified)
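
% (Added sketch) Inspect a few of the misclassified test images, mirroring
% the plotting style of figures 1-3; this simply indexes testImages with the
% misclassified list built above, guarding against fewer than 25 entries.
figure(4);
colormap gray;
for k = 1:min(25, numel(misclassified))
    subplot(5,5,k), imagesc(testImages(:,:,misclassified(k)));
    axis image;
    axis off;
end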

%% Plot the error of the strong classifier as a function of the number of weak classifiers.
% Note: you can find this error without re-training with a different
% number of weak classifiers: accumulate the weighted votes one weak
% classifier at a time and re-threshold the running score after each one.

scores = zeros(1, 2*nbrTestExamples);  % running sum of alpha_c * h_c(x)
errors = zeros(1, nClf);               % test error using the first c stumps
for c = 1:nClf
    h = ones(1, 2*nbrTestExamples);
    h(clfs(2,c)*xTest(clfs(3,c),:) < clfs(2,c)*clfs(1,c)) = -1;
    scores = scores + clfs(4,c)*h;
    errors(c) = mean(sign(scores) ~= yTest);
end

figure
plot(1:nClf, errors);
title('Test error with different numbers of weak classifiers.')
xlabel('Number of weak classifiers')
ylabel('Test error')
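
% --- Minimal sketches of the two helper functions referenced above ---
% The lab framework expects WeakClassifier and WeakClassifierError in their
% own .m files; the versions below are assumptions reconstructed from how
% they are called in this script (a decision stump and its weighted error),
% included as script-local functions (requires MATLAB R2016b or later).

function C = WeakClassifier(T, P, X)
% Decision stump (assumed form): returns -1 where P*X < P*T, +1 elsewhere.
% T: scalar threshold, P: polarity (+1/-1), X: 1 x N feature values.
C = ones(1, numel(X));
C(P*X < P*T) = -1;
end

function E = WeakClassifierError(C, D, Y)
% Weighted classification error (assumed form): total weight of the
% misclassified examples. C: stump predictions, D: example weights
% (normalized to sum to 1), Y: true labels in {-1, +1}.
E = sum(D(C ~= Y));
end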